diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index fcf34e569555..5cf566de0f9a 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -37,9 +37,13 @@ docs/bigquery_datatransfer/ @googleapis/api-bigquery bigquery_storage/ @googleapis/api-bigquery docs/bigquery_storage/ @googleapis/api-bigquery +# Data Catalog isn't technically part of BigQuery, but it's closely related. +datacatalog/ @googleapis/api-bigquery +docs/datacatalog/ @googleapis/api-bigquery + # Pubsub -pubsub/ @anguillanneuf @plamut -docs/pubsub @anguillanneuf @plamut +pubsub/ @anguillanneuf @plamut @pradn +docs/pubsub @anguillanneuf @plamut @pradn # Tim Swast is the primary author of Runtime Config. diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000000..d0bd7b92b3c7 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/google-cloud-python/issues) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 \ No newline at end of file diff --git a/.kokoro/continuous/recommender.cfg b/.kokoro/continuous/recommender.cfg new file mode 100644 index 000000000000..7d65909e6343 --- /dev/null +++ b/.kokoro/continuous/recommender.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. 
+env_vars: { + key: "PACKAGE" + value: "recommender" +} diff --git a/.kokoro/docs/recommender.cfg b/.kokoro/docs/recommender.cfg new file mode 100644 index 000000000000..7d65909e6343 --- /dev/null +++ b/.kokoro/docs/recommender.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "recommender" +} diff --git a/.kokoro/presubmit/recommender.cfg b/.kokoro/presubmit/recommender.cfg new file mode 100644 index 000000000000..7d65909e6343 --- /dev/null +++ b/.kokoro/presubmit/recommender.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "recommender" +} diff --git a/.kokoro/release/recommender.cfg b/.kokoro/release/recommender.cfg new file mode 100644 index 000000000000..7d65909e6343 --- /dev/null +++ b/.kokoro/release/recommender.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Tell the trampoline which build file to use. +env_vars: { + key: "PACKAGE" + value: "recommender" +} diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 3ffb6031096a..f606d71ec2e8 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -49,7 +49,7 @@ You'll have to create a development environment to hack on $ cd hack-on-google-cloud-python # Configure remotes such that you can pull changes from the google-cloud-python # repository into your local repository. - $ git remote add upstream git@github.com:GoogleCloudPlatform/google-cloud-python.git + $ git remote add upstream git@github.com:googleapis/google-cloud-python.git # fetch and merge changes from upstream into master $ git fetch upstream $ git merge upstream/master @@ -60,23 +60,25 @@ repo, from which you can submit a pull request. To work on the codebase and run the tests, we recommend using ``nox``, but you can also use a ``virtualenv`` of your own creation. -.. 
_repo: https://github.com/GoogleCloudPlatform/google-cloud-python +.. _repo: https://github.com/googleapis/google-cloud-python Using ``nox`` ============= We use `nox `__ to instrument our tests. -- To test your changes, run unit tests with ``nox``:: +You must install nox using Python 3. - $ nox -f datastore/noxfile.py -s unit-2.7 - $ nox -f datastore/noxfile.py -s unit-3.7 +- To test your changes, go to the package directory and run ``nox``:: + + $ nox -s unit-2.7 + $ nox -s unit-3.7 $ ... .. note:: The unit tests and system tests are contained in the individual - ``nox.py`` files in each directory; substitute ``datastore`` in the + ``noxfile.py`` files in each directory; substitute ``datastore`` in the example above with the package of your choice. @@ -85,7 +87,7 @@ We use `nox `__ to instrument our tests. $ export GIT_ROOT=$(pwd) $ cd ${GIT_ROOT}/datastore/ - $ nox -s "unit(py='3.7')" + $ nox -s unit-3.7 .. nox: https://pypi.org/project/nox-automation/ @@ -122,9 +124,13 @@ On Debian/Ubuntu:: Coding Style ************ -- PEP8 compliance, with exceptions defined in the linter configuration. - If you have ``nox`` installed, you can test that you have not introduced - any non-compliant code via:: +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. + + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. + You can test for non-compliant code via:: $ nox -s lint @@ -135,8 +141,8 @@ Coding Style export GOOGLE_CLOUD_TESTING_BRANCH="master" By doing this, you are specifying the location of the most up-to-date - version of ``google-cloud-python``. The the suggested remote name ``upstream`` - should point to the official ``GoogleCloudPlatform`` checkout and the + version of ``google-cloud-python``. 
The suggested remote name ``upstream`` + should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). Exceptions to PEP8: @@ -149,10 +155,11 @@ Exceptions to PEP8: Running System Tests ******************** -- To run system tests for a given package, you can execute:: +- To run system tests for a given package, go to the package directory + and execute:: - $ nox -f datastore/noxfile.py -s system-3.7 - $ nox -f datastore/noxfile.py -s system-2.7 + $ nox -s system-3.7 + $ nox -s system-2.7 .. note:: @@ -216,9 +223,10 @@ Running System Tests Running Generated Sample Tests ****************************** -- To run system tests for a given package, you can execute:: +- To run system tests for a given package, go to the package directory + and execute:: - $ nox -f speech/noxfile.py -s samples + $ nox -s samples .. note:: @@ -246,33 +254,13 @@ documentation in this package which references that API or behavior must be changed to reflect the bug fix, ideally in the same commit that fixes the bug or adds the feature. -To build and review docs (where ``${VENV}`` refers to the virtualenv you're -using to develop ``google-cloud-python``): - -#. After following the steps above in "Using a Development Checkout", install - Sphinx and all development requirements in your virtualenv:: - - $ cd ${HOME}/hack-on-google-cloud-python - $ ${VENV}/bin/pip install Sphinx +To build and review docs go to the package directory and execute:: -#. Change into the ``docs`` directory within your ``google-cloud-python`` checkout and - execute the ``make`` command with some flags:: - - $ cd ${HOME}/hack-on-google-cloud-python/google-cloud-python/docs - $ make clean html SPHINXBUILD=${VENV}/bin/sphinx-build - - The ``SPHINXBUILD=...`` argument tells Sphinx to use the virtualenv Python, - which will have both Sphinx and ``google-cloud-python`` (for API documentation - generation) installed. +$ nox -s docs #. 
Open the ``docs/_build/html/index.html`` file to see the resulting HTML rendering. -As an alternative to 1. and 2. above, if you have ``nox`` installed, you -can build the docs via:: - - $ nox -s docs - ******************************************** Note About ``README`` as it pertains to PyPI ******************************************** @@ -287,13 +275,10 @@ may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/google-cloud/ ********************** -CircleCI Configuration +Kokoro Configuration ********************** -All build scripts in the ``.circleci/config.yml`` configuration file which have -Python dependencies are specified in the ``nox.py`` configuration. -They are executed in the Travis build via ``nox -s ${ENV}`` where -``${ENV}`` is the environment being tested. +Build scripts and configurations are in the ``.kokoro`` directory. ************************* diff --git a/README.rst b/README.rst index 0ea6c611ec63..fe68914385bf 100644 --- a/README.rst +++ b/README.rst @@ -28,46 +28,71 @@ priority. 
The following client libraries have **GA** support: -- `Google BigQuery`_ (`BigQuery README`_) -- `Google Cloud Bigtable`_ (`Bigtable README`_) -- `Google Cloud Datastore`_ (`Datastore README`_) -- `Google Cloud KMS`_ (`KMS README`_) -- `Google Cloud Natural Language`_ (`Natural Language README`_) -- `Google Cloud Pub/Sub`_ (`Pub/Sub README`_) -- `Google Cloud Scheduler`_ (`Scheduler README`_) -- `Google Cloud Spanner`_ (`Spanner README`_) -- `Google Cloud Speech`_ (`Speech README`_) -- `Google Cloud Storage`_ (`Storage README`_) -- `Google Cloud Tasks`_ (`Tasks README`_) -- `Google Cloud Translation`_ (`Translation README`_) -- `Stackdriver Logging`_ (`Logging README`_) +- `Google BigQuery`_ (`BigQuery README`_, `BigQuery Documentation`_) +- `Google Cloud Bigtable`_ (`Bigtable README`_, `Bigtable Documentation`_) +- `Google Cloud Datastore`_ (`Datastore README`_, `Datastore Documentation`_) +- `Google Cloud KMS`_ (`KMS README`_, `KMS Documentation`_) +- `Google Cloud Natural Language`_ (`Natural Language README`_, `Natural Language Documentation`_) +- `Google Cloud Pub/Sub`_ (`Pub/Sub README`_, `Pub/Sub Documentation`_) +- `Google Cloud Scheduler`_ (`Scheduler README`_, `Scheduler Documentation`_) +- `Google Cloud Spanner`_ (`Spanner README`_, `Spanner Documentation`_) +- `Google Cloud Speech to Text`_ (`Speech to Text README`_, `Speech to Text Documentation`_) +- `Google Cloud Storage`_ (`Storage README`_, `Storage Documentation`_) +- `Google Cloud Tasks`_ (`Tasks README`_, `Tasks Documentation`_) +- `Google Cloud Translation`_ (`Translation README`_, `Translation Documentation`_) +- `Stackdriver Logging`_ (`Logging README`_, `Logging Documentation`_) .. _Google BigQuery: https://pypi.org/project/google-cloud-bigquery/ .. _BigQuery README: https://github.com/googleapis/google-cloud-python/tree/master/bigquery +.. _BigQuery Documentation: https://googleapis.dev/python/bigquery/latest + .. _Google Cloud Bigtable: https://pypi.org/project/google-cloud-bigtable/ .. 
_Bigtable README: https://github.com/googleapis/google-cloud-python/tree/master/bigtable +.. _Bigtable Documentation: https://googleapis.dev/python/bigtable/latest + .. _Google Cloud Datastore: https://pypi.org/project/google-cloud-datastore/ .. _Datastore README: https://github.com/googleapis/google-cloud-python/tree/master/datastore +.. _Datastore Documentation: https://googleapis.dev/python/datastore/latest + .. _Google Cloud KMS: https://pypi.org/project/google-cloud-kms/ .. _KMS README: https://github.com/googleapis/google-cloud-python/tree/master/kms +.. _KMS Documentation: https://googleapis.dev/python/cloudkms/latest + .. _Google Cloud Natural Language: https://pypi.org/project/google-cloud-language/ .. _Natural Language README: https://github.com/googleapis/google-cloud-python/tree/master/language +.. _Natural Language Documentation: https://googleapis.dev/python/language/latest + .. _Google Cloud Pub/Sub: https://pypi.org/project/google-cloud-pubsub/ .. _Pub/Sub README: https://github.com/googleapis/google-cloud-python/tree/master/pubsub +.. _Pub/Sub Documentation: https://googleapis.dev/python/pubsub/latest + .. _Google Cloud Spanner: https://pypi.org/project/google-cloud-spanner .. _Spanner README: https://github.com/googleapis/google-cloud-python/tree/master/spanner -.. _Google Cloud Speech: https://pypi.org/project/google-cloud-speech/ -.. _Speech README: https://github.com/googleapis/google-cloud-python/tree/master/speech +.. _Spanner Documentation: https://googleapis.dev/python/spanner/latest + +.. _Google Cloud Speech to Text: https://pypi.org/project/google-cloud-speech/ +.. _Speech to Text README: https://github.com/googleapis/google-cloud-python/tree/master/speech +.. _Speech to Text Documentation: https://googleapis.dev/python/speech/latest + .. _Google Cloud Storage: https://pypi.org/project/google-cloud-storage/ .. _Storage README: https://github.com/googleapis/google-cloud-python/tree/master/storage +.. 
_Storage Documentation: https://googleapis.dev/python/storage/latest + .. _Google Cloud Tasks: https://pypi.org/project/google-cloud-tasks/ .. _Tasks README: https://github.com/googleapis/google-cloud-python/tree/master/tasks +.. _Tasks Documentation: https://googleapis.dev/python/cloudtasks/latest + .. _Google Cloud Translation: https://pypi.org/project/google-cloud-translate/ .. _Translation README: https://github.com/googleapis/google-cloud-python/tree/master/translate +.. _Translation Documentation: https://googleapis.dev/python/translation/latest + .. _Google Cloud Scheduler: https://pypi.org/project/google-cloud-scheduler/ .. _Scheduler README: https://github.com/googleapis/google-cloud-python/tree/master/scheduler +.. _Scheduler Documentation: https://googleapis.dev/python/cloudscheduler/latest + .. _Stackdriver Logging: https://pypi.org/project/google-cloud-logging/ .. _Logging README: https://github.com/googleapis/google-cloud-python/tree/master/logging +.. _Logging Documentation: https://googleapis.dev/python/logging/latest Beta Support ------------ @@ -78,16 +103,21 @@ against beta libraries are addressed with a higher priority. The following client libraries have **beta** support: -- `Google Cloud Firestore`_ (`Firestore README`_) -- `Google Cloud Video Intelligence`_ (`Video Intelligence README`_) -- `Google Cloud Vision`_ (`Vision README`_) +- `Google Cloud Firestore`_ (`Firestore README`_, `Firestore Documentation`_) +- `Google Cloud Video Intelligence`_ (`Video Intelligence README`_, `Video Intelligence Documentation`_) +- `Google Cloud Vision`_ (`Vision README`_, `Vision Documentation`_) .. _Google Cloud Firestore: https://pypi.org/project/google-cloud-firestore/ .. _Firestore README: https://github.com/googleapis/google-cloud-python/tree/master/firestore +.. _Firestore Documentation: https://googleapis.dev/python/firestore/latest + .. _Google Cloud Video Intelligence: https://pypi.org/project/google-cloud-videointelligence .. 
_Video Intelligence README: https://github.com/googleapis/google-cloud-python/tree/master/videointelligence +.. _Video Intelligence Documentation: https://googleapis.dev/python/videointelligence/latest + .. _Google Cloud Vision: https://pypi.org/project/google-cloud-vision/ .. _Vision README: https://github.com/googleapis/google-cloud-python/tree/master/vision +.. _Vision Documentation: https://googleapis.dev/python/vision/latest Alpha Support @@ -99,64 +129,106 @@ updates. See `versioning`_ for more details. The following client libraries have **alpha** support: -- `Google Cloud Asset`_ (`Asset README`_) -- `Google Cloud AutoML`_ (`AutoML README`_) -- `Google BigQuery Data Transfer`_ (`BigQuery Data Transfer README`_) -- `Google Cloud Bigtable - HappyBase`_ (`HappyBase README`_) -- `Google Cloud Container`_ (`Container README`_) -- `Google Cloud Container Analysis`_ (`Container Analysis README`_) -- `Google Cloud Dataproc`_ (`Dataproc README`_) -- `Google Cloud DLP`_ (`DLP README`_) -- `Google Cloud DNS`_ (`DNS README`_) -- `Google Cloud IoT`_ (`IoT README`_) -- `Google Cloud Memorystore for Redis`_ (`Redis README`_) -- `Google Cloud Resource Manager`_ (`Resource Manager README`_) -- `Google Cloud Runtime Configuration`_ (`Runtime Config README`_) -- `Google Cloud Security Scanner`_ (`Security Scanner README`_ ) -- `Google Cloud Trace`_ (`Trace README`_) -- `Google Cloud Text-to-Speech`_ (`Text-to-Speech README`_) -- `Grafeas`_ (`Grafeas README`_) -- `Stackdriver Error Reporting`_ (`Error Reporting README`_) -- `Stackdriver Monitoring`_ (`Monitoring README`_) +- `Google Cloud Asset`_ (`Asset README`_, `Asset Documentation`_) +- `Google Cloud AutoML`_ (`AutoML README`_, `AutoML Documentation`_) +- `Google BigQuery Data Transfer`_ (`BigQuery Data Transfer README`_, `BigQuery Documentation`_) +- `Google Cloud Bigtable - HappyBase`_ (`HappyBase README`_, `HappyBase Documentation`_) +- `Google Cloud Container`_ (`Container README`_, `Container Documentation`_) +- 
`Google Cloud Container Analysis`_ (`Container Analysis README`_, `Container Analysis Documentation`_) +- `Google Cloud Dataproc`_ (`Dataproc README`_, `Dataproc Documentation`_) +- `Google Cloud DLP`_ (`DLP README`_, `DLP Documentation`_) +- `Google Cloud DNS`_ (`DNS README`_, `DNS Documentation`_) +- `Google Cloud IoT`_ (`IoT README`_, `IoT Documentation`_) +- `Google Cloud Memorystore for Redis`_ (`Redis README`_, `Redis Documentation`_) +- `Google Cloud Recommender`_ (`Recommender README`_, `Recommender Documentation`_) +- `Google Cloud Resource Manager`_ (`Resource Manager README`_, `Resource Manager Documentation`_) +- `Google Cloud Runtime Configuration`_ (`Runtime Config README`_, `Runtime Config Documentation`_) +- `Google Cloud Security Scanner`_ (`Security Scanner README`_ , `Security Scanner Documentation`_) +- `Google Cloud Trace`_ (`Trace README`_, `Trace Documentation`_) +- `Google Cloud Text-to-Speech`_ (`Text-to-Speech README`_, `Text-to-Speech Documentation`_) +- `Grafeas`_ (`Grafeas README`_, `Grafeas Documentation`_) +- `Stackdriver Error Reporting`_ (`Error Reporting README`_, `Error Reporting Documentation`_) +- `Stackdriver Monitoring`_ (`Monitoring README`_, `Monitoring Documentation`_) .. _Google Cloud Asset: https://pypi.org/project/google-cloud-asset/ .. _Asset README: https://github.com/googleapis/google-cloud-python/blob/master/asset +.. _Asset Documentation: https://googleapis.dev/python/cloudasset/latest + .. _Google Cloud AutoML: https://pypi.org/project/google-cloud-automl/ .. _AutoML README: https://github.com/googleapis/google-cloud-python/blob/master/automl +.. _AutoML Documentation: https://googleapis.dev/python/automl/latest + .. _Google BigQuery Data Transfer: https://pypi.org/project/google-cloud-bigquery-datatransfer/ .. _BigQuery Data Transfer README: https://github.com/googleapis/google-cloud-python/tree/master/bigquery_datatransfer +.. _BigQuery Documentation: https://googleapis.dev/python/bigquery/latest + .. 
_Google Cloud Bigtable - HappyBase: https://pypi.org/project/google-cloud-happybase/ .. _HappyBase README: https://github.com/googleapis/google-cloud-python-happybase +.. _HappyBase Documentation: https://google-cloud-python-happybase.readthedocs.io/en/latest/ + .. _Google Cloud Container: https://pypi.org/project/google-cloud-container/ .. _Container README: https://github.com/googleapis/google-cloud-python/tree/master/container +.. _Container Documentation: https://googleapis.dev/python/container/latest + .. _Google Cloud Container Analysis: https://pypi.org/project/google-cloud-containeranalysis/ .. _Container Analysis README: https://github.com/googleapis/google-cloud-python/tree/master/containeranalysis +.. _Container Analysis Documentation: https://googleapis.dev/python/containeranalysis/latest + .. _Google Cloud Dataproc: https://pypi.org/project/google-cloud-dataproc/ .. _Dataproc README: https://github.com/googleapis/google-cloud-python/tree/master/dataproc +.. _Dataproc Documentation: https://googleapis.dev/python/dataproc/latest + .. _Google Cloud DLP: https://pypi.org/project/google-cloud-dlp/ .. _DLP README: https://github.com/googleapis/google-cloud-python/tree/master/dlp +.. _DLP Documentation: https://googleapis.dev/python/dlp/latest + .. _Google Cloud DNS: https://pypi.org/project/google-cloud-dns/ .. _DNS README: https://github.com/googleapis/google-cloud-python/tree/master/dns +.. _DNS Documentation: https://googleapis.dev/python/dns/latest + .. _Google Cloud IoT: https://pypi.org/project/google-cloud-iot/ .. _IoT README: https://github.com/googleapis/google-cloud-python/tree/master/iot +.. _IoT Documentation: https://googleapis.dev/python/cloudiot/latest + .. _Google Cloud Memorystore for Redis: https://pypi.org/project/google-cloud-redis/ .. _Redis README: https://github.com/googleapis/google-cloud-python/tree/master/redis +.. _Redis Documentation: https://googleapis.dev/python/redis/latest + +.. 
_Google Cloud Recommender: https://pypi.org/project/google-cloud-recommender/ +.. _Recommender README: https://github.com/googleapis/google-cloud-python/tree/master/recommender +.. _Recommender Documentation: https://googleapis.dev/python/recommender/latest + .. _Google Cloud Resource Manager: https://pypi.org/project/google-cloud-resource-manager/ .. _Resource Manager README: https://github.com/googleapis/google-cloud-python/tree/master/resource_manager +.. _Resource Manager Documentation: https://googleapis.dev/python/cloudresourcemanager/latest + .. _Google Cloud Runtime Configuration: https://pypi.org/project/google-cloud-runtimeconfig/ .. _Runtime Config README: https://github.com/googleapis/google-cloud-python/tree/master/runtimeconfig +.. _Runtime Config Documentation: https://googleapis.dev/python/runtimeconfig/latest + .. _Google Cloud Security Scanner: https://pypi.org/project/google-cloud-websecurityscanner/ .. _Security Scanner README: https://github.com/googleapis/google-cloud-python/blob/master/websecurityscanner +.. _Security Scanner Documentation: https://googleapis.dev/python/websecurityscanner/latest + .. _Google Cloud Text-to-Speech: https://pypi.org/project/google-cloud-texttospeech/ .. _Text-to-Speech README: https://github.com/googleapis/google-cloud-python/tree/master/texttospeech +.. _Text-to-Speech Documentation: https://googleapis.dev/python/texttospeech/latest + .. _Google Cloud Trace: https://pypi.org/project/google-cloud-trace/ .. _Trace README: https://github.com/googleapis/google-cloud-python/tree/master/trace +.. _Trace Documentation: https://googleapis.dev/python/cloudtrace/latest + .. _Grafeas: https://pypi.org/project/grafeas/ .. _Grafeas README: https://github.com/googleapis/google-cloud-python/tree/master/grafeas +.. _Grafeas Documentation: https://googleapis.dev/python/grafeas/latest + .. _Stackdriver Error Reporting: https://pypi.org/project/google-cloud-error-reporting/ .. 
_Error Reporting README: https://github.com/googleapis/google-cloud-python/tree/master/error_reporting +.. _Error Reporting Documentation: https://googleapis.dev/python/clouderrorreporting/latest + .. _Stackdriver Monitoring: https://pypi.org/project/google-cloud-monitoring/ .. _Monitoring README: https://github.com/googleapis/google-cloud-python/tree/master/monitoring +.. _Monitoring Documentation: https://googleapis.dev/python/monitoring/latest .. _versioning: https://github.com/googleapis/google-cloud-python/blob/master/CONTRIBUTING.rst#versioning diff --git a/api_core/CHANGELOG.md b/api_core/CHANGELOG.md index 74ab16564dbc..1b1ec96d717b 100644 --- a/api_core/CHANGELOG.md +++ b/api_core/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-api-core/#history +## 1.14.3 + +10-07-2019 10:35 PDT + + +### Implementation Changes +- Finalize during close of 'ResumableBidiRpc' ([#9337](https://github.com/googleapis/google-cloud-python/pull/9337)) +- add on_error to Retry.__init__ ([#8892](https://github.com/googleapis/google-cloud-python/pull/8892)) +- Fix race in 'BackgroundConsumer._thread_main'. ([#8883](https://github.com/googleapis/google-cloud-python/pull/8883)) + +### Documentation +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix broken links in docs. ([#9148](https://github.com/googleapis/google-cloud-python/pull/9148)) +- About of time -> amount of time ([#9052](https://github.com/googleapis/google-cloud-python/pull/9052)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Remove CI for gh-pages, use googleapis.dev for api_core refs. 
([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + ## 1.14.2 07-30-2019 14:08 PDT diff --git a/api_core/docs/auth.rst b/api_core/docs/auth.rst index d0e84c65ecac..cec7c16ddf29 100644 --- a/api_core/docs/auth.rst +++ b/api_core/docs/auth.rst @@ -177,22 +177,6 @@ described above, so be sure none of the other possible environments conflict with your user provided credentials. -Advanced users of `oauth2client`_ can also use custom flows to -create credentials using `client secrets`_ or using a -`webserver flow`_. -After creation, :class:`Credentials ` -can be serialized with -:meth:`to_json() ` -and stored in a file and then and deserialized with -:meth:`from_json() `. In order -to use ``oauth2client``'s credentials with this library, you'll need to -`convert them`_. - -.. _oauth2client: https://github.com/Google/oauth2client -.. _client secrets: https://developers.google.com/api-client-library/python/guide/aaa_oauth#flow_from_clientsecrets -.. _webserver flow: https://developers.google.com/api-client-library/python/guide/aaa_oauth#OAuth2WebServerFlow -.. _convert them: http://google-auth.readthedocs.io/en/stable/user-guide.html#user-credentials - Troubleshooting =============== diff --git a/api_core/google/api_core/bidi.py b/api_core/google/api_core/bidi.py index f73c7c9dfabc..b171a4112a31 100644 --- a/api_core/google/api_core/bidi.py +++ b/api_core/google/api_core/bidi.py @@ -561,6 +561,10 @@ def _recv(self): def recv(self): return self._recoverable(self._recv) + def close(self): + self._finalize(None) + super(ResumableBidiRpc, self).close() + @property def is_active(self): """bool: True if this stream is currently open and active.""" @@ -698,7 +702,11 @@ def stop(self): if self._thread is not None: # Resume the thread to wake it up in case it is sleeping. self.resume() - self._thread.join() + # The daemonized thread may itself block, so don't wait + # for it longer than a second. 
+ self._thread.join(1.0) + if self._thread.is_alive(): # pragma: NO COVER + _LOGGER.warning("Background thread did not exit.") self._thread = None diff --git a/api_core/google/api_core/page_iterator.py b/api_core/google/api_core/page_iterator.py index 3ac5904399b0..11a92d38f3ce 100644 --- a/api_core/google/api_core/page_iterator.py +++ b/api_core/google/api_core/page_iterator.py @@ -96,14 +96,22 @@ class Page(object): Callable to convert an item from the type in the raw API response into the native object. Will be called with the iterator and a single item. + raw_page Optional[google.protobuf.message.Message]: + The raw page response. """ - def __init__(self, parent, items, item_to_value): + def __init__(self, parent, items, item_to_value, raw_page=None): self._parent = parent self._num_items = len(items) self._remaining = self._num_items self._item_iter = iter(items) self._item_to_value = item_to_value + self._raw_page = raw_page + + @property + def raw_page(self): + """google.protobuf.message.Message""" + return self._raw_page @property def num_items(self): @@ -360,7 +368,7 @@ def _next_page(self): if self._has_next_page(): response = self._get_next_page_response() items = response.get(self._items_key, ()) - page = Page(self, items, self.item_to_value) + page = Page(self, items, self.item_to_value, raw_page=response) self._page_start(self, page, response) self.next_page_token = response.get(self._next_token) return page @@ -527,7 +535,7 @@ def _next_page(self): self.next_page_token = getattr(response, self._response_token_field) items = getattr(response, self._items_field) - page = Page(self, items, self.item_to_value) + page = Page(self, items, self.item_to_value, raw_page=response) return page diff --git a/api_core/setup.py b/api_core/setup.py index 25b7072d91f2..16802fef4623 100644 --- a/api_core/setup.py +++ b/api_core/setup.py @@ -22,7 +22,7 @@ name = "google-api-core" description = "Google API client core library" -version = "1.14.2" +version = "1.14.3" # 
Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/api_core/tests/unit/test_bidi.py b/api_core/tests/unit/test_bidi.py index 4d185d3158e4..52215cbde22f 100644 --- a/api_core/tests/unit/test_bidi.py +++ b/api_core/tests/unit/test_bidi.py @@ -597,6 +597,31 @@ def test_recv_failure(self): assert bidi_rpc.is_active is False assert call.cancelled is True + def test_close(self): + call = mock.create_autospec(_CallAndFuture, instance=True) + + def cancel_side_effect(): + call.is_active.return_value = False + + call.cancel.side_effect = cancel_side_effect + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, instance=True, return_value=call + ) + should_recover = mock.Mock(spec=["__call__"], return_value=False) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + bidi_rpc.open() + + bidi_rpc.close() + + should_recover.assert_not_called() + call.cancel.assert_called_once() + assert bidi_rpc.call == call + assert bidi_rpc.is_active is False + # ensure the request queue was signaled to stop. 
+ assert bidi_rpc.pending_requests == 1 + assert bidi_rpc._request_queue.get() is None + assert bidi_rpc._finalized + def test_reopen_failure_on_rpc_restart(self): error1 = ValueError("1") error2 = ValueError("2") diff --git a/api_core/tests/unit/test_page_iterator.py b/api_core/tests/unit/test_page_iterator.py index 6335001bcf41..2bf742492889 100644 --- a/api_core/tests/unit/test_page_iterator.py +++ b/api_core/tests/unit/test_page_iterator.py @@ -36,9 +36,10 @@ def test_constructor(self): assert page.remaining == 3 assert page._parent is parent assert page._item_to_value is item_to_value + assert page.raw_page is None def test___iter__(self): - page = page_iterator.Page(None, (), None) + page = page_iterator.Page(None, (), None, None) assert iter(page) is page def test_iterator_calls_parent_item_to_value(self): @@ -69,6 +70,18 @@ def test_iterator_calls_parent_item_to_value(self): item_to_value.assert_called_with(parent, 12) assert page.remaining == 97 + def test_raw_page(self): + parent = mock.sentinel.parent + item_to_value = mock.sentinel.item_to_value + + raw_page = mock.sentinel.raw_page + + page = page_iterator.Page(parent, (1, 2, 3), item_to_value, raw_page=raw_page) + assert page.raw_page is raw_page + + with pytest.raises(AttributeError): + page.raw_page = None + class PageIteratorImpl(page_iterator.Iterator): def _next_page(self): @@ -116,8 +129,7 @@ def test_pages_property_restart(self): def test__page_iter_increment(self): iterator = PageIteratorImpl(None, None) page = page_iterator.Page( - iterator, ("item",), page_iterator._item_to_value_identity - ) + iterator, ("item",), page_iterator._item_to_value_identity) iterator._next_page = mock.Mock(side_effect=[page, None]) assert iterator.num_results == 0 @@ -147,11 +159,9 @@ def test__items_iter(self): # Make pages from mock responses parent = mock.sentinel.parent page1 = page_iterator.Page( - parent, (item1, item2), page_iterator._item_to_value_identity - ) + parent, (item1, item2), 
page_iterator._item_to_value_identity) page2 = page_iterator.Page( - parent, (item3,), page_iterator._item_to_value_identity - ) + parent, (item3,), page_iterator._item_to_value_identity) iterator = PageIteratorImpl(None, None) iterator._next_page = mock.Mock(side_effect=[page1, page2, None]) diff --git a/asset/CHANGELOG.md b/asset/CHANGELOG.md index 049d0f2c3017..3f362e050225 100644 --- a/asset/CHANGELOG.md +++ b/asset/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-asset/#history +## 0.5.0 + +10-29-2019 14:26 PDT + +### New Features +- Add `bigquery_destination` to `OutputConfig`; make `content_type` optional argument to `BatchGetAssetsHistoryRequest`; add `uri_prefix` to `GcsDestination`; add `ORG_POLICY` and `ACCESS_POLICY` content type enums ([#9555](https://github.com/googleapis/google-cloud-python/pull/9555)) + +### Documentation +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages; use googleapis.dev for api_core refs ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + ## 0.4.1 08-12-2019 13:44 PDT diff --git a/asset/google/cloud/asset_v1/gapic/asset_service_client.py b/asset/google/cloud/asset_v1/gapic/asset_service_client.py index ab9078cc62c7..e4b3c18bc799 100644 --- a/asset/google/cloud/asset_v1/gapic/asset_service_client.py +++ b/asset/google/cloud/asset_v1/gapic/asset_service_client.py @@ -254,7 +254,7 @@ def export_assets( asset_types (list[str]): A list of asset types of which to take a snapshot for. For example: "compute.googleapis.com/Disk". If specified, only matching assets will be returned. See `Introduction to Cloud Asset - Inventory `__ + Inventory `__ for all supported asset types. content_type (~google.cloud.asset_v1.types.ContentType): Asset content type. 
If not specified, no content but the asset name will be returned. @@ -357,7 +357,7 @@ def batch_get_assets_history( parent (str): Required. The relative name of the root asset. It can only be an organization number (such as "organizations/123"), a project ID (such as "projects/my-project-id")", or a project number (such as "projects/12345"). - content_type (~google.cloud.asset_v1.types.ContentType): Required. The content type. + content_type (~google.cloud.asset_v1.types.ContentType): Optional. The content type. read_time_window (Union[dict, ~google.cloud.asset_v1.types.TimeWindow]): Optional. The time window for the asset history. Both start\_time and end\_time are optional and if set, it must be after 2018-10-02 UTC. If end\_time is not set, it is default to current timestamp. If start\_time @@ -372,7 +372,7 @@ def batch_get_assets_history( See `Resource Names `__ and `Resource Name - Format `__ + Format `__ for more info. The request becomes a no-op if the asset name list is empty, and the max diff --git a/asset/google/cloud/asset_v1/gapic/enums.py b/asset/google/cloud/asset_v1/gapic/enums.py index 780beae4ddef..38eb45ae0103 100644 --- a/asset/google/cloud/asset_v1/gapic/enums.py +++ b/asset/google/cloud/asset_v1/gapic/enums.py @@ -27,11 +27,15 @@ class ContentType(enum.IntEnum): CONTENT_TYPE_UNSPECIFIED (int): Unspecified content type. RESOURCE (int): Resource metadata. IAM_POLICY (int): The actual IAM policy set on a resource. + ORG_POLICY (int): The Cloud Organization Policy set on an asset. + ACCESS_POLICY (int): The Cloud Access context mananger Policy set on an asset. 
""" CONTENT_TYPE_UNSPECIFIED = 0 RESOURCE = 1 IAM_POLICY = 2 + ORG_POLICY = 4 + ACCESS_POLICY = 5 class NullValue(enum.IntEnum): diff --git a/asset/google/cloud/asset_v1/proto/asset_service.proto b/asset/google/cloud/asset_v1/proto/asset_service.proto index 0dfc2898b131..33dde9cd365b 100644 --- a/asset/google/cloud/asset_v1/proto/asset_service.proto +++ b/asset/google/cloud/asset_v1/proto/asset_service.proto @@ -18,8 +18,13 @@ syntax = "proto3"; package google.cloud.asset.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/asset/v1/assets.proto"; import "google/longrunning/operations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; option csharp_namespace = "Google.Cloud.Asset.V1"; @@ -29,9 +34,11 @@ option java_outer_classname = "AssetServiceProto"; option java_package = "com.google.cloud.asset.v1"; option php_namespace = "Google\\Cloud\\Asset\\V1"; - // Asset service definition. service AssetService { + option (google.api.default_host) = "cloudasset.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Exports assets with time and resource types to a given Cloud Storage // location. The output format is newline-delimited JSON. // This API implements the [google.longrunning.Operation][google.longrunning.Operation] API allowing you @@ -41,6 +48,10 @@ service AssetService { post: "/v1/{parent=*/*}:exportAssets" body: "*" }; + option (google.longrunning.operation_info) = { + response_type: "google.cloud.asset.v1.ExportAssetsResponse" + metadata_type: "google.cloud.asset.v1.ExportAssetsRequest" + }; } // Batch gets the update history of assets that overlap a time window. 
@@ -63,7 +74,12 @@ message ExportAssetsRequest { // organization number (such as "organizations/123"), a project ID (such as // "projects/my-project-id"), or a project number (such as "projects/12345"), // or a folder number (such as "folders/123"). - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudasset.googleapis.com/Asset" + } + ]; // Timestamp to take an asset snapshot. This can only be set to a timestamp // between 2018-10-02 UTC (inclusive) and the current time. If not specified, @@ -73,9 +89,9 @@ message ExportAssetsRequest { google.protobuf.Timestamp read_time = 2; // A list of asset types of which to take a snapshot for. For example: - // "compute.googleapis.com/Disk". If specified, only matching assets will be returned. - // See [Introduction to Cloud Asset - // Inventory](https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/overview) + // "compute.googleapis.com/Disk". If specified, only matching assets will be + // returned. See [Introduction to Cloud Asset + // Inventory](https://cloud.google.com/asset-inventory/docs/overview) // for all supported asset types. repeated string asset_types = 3; @@ -85,7 +101,7 @@ message ExportAssetsRequest { // Required. Output configuration indicating where the results will be output // to. All results will be in newline delimited JSON format. - OutputConfig output_config = 5; + OutputConfig output_config = 5 [(google.api.field_behavior) = REQUIRED]; } // The export asset response. This message is returned by the @@ -105,21 +121,27 @@ message BatchGetAssetsHistoryRequest { // Required. The relative name of the root asset. It can only be an // organization number (such as "organizations/123"), a project ID (such as // "projects/my-project-id")", or a project number (such as "projects/12345"). 
- string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudasset.googleapis.com/Asset" + } + ]; // A list of the full names of the assets. For example: // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. // See [Resource // Names](https://cloud.google.com/apis/design/resource_names#full_resource_name) - // and [Resource Name Format](https://cloud.google.com/resource-manager/docs/cloud-asset-inventory/resource-name-format) + // and [Resource Name + // Format](https://cloud.google.com/asset-inventory/docs/resource-name-format) // for more info. // // The request becomes a no-op if the asset name list is empty, and the max // size of the asset name list is 100 in one request. repeated string asset_names = 2; - // Required. The content type. - ContentType content_type = 3; + // Optional. The content type. + ContentType content_type = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The time window for the asset history. Both start_time and // end_time are optional and if set, it must be after 2018-10-02 UTC. If @@ -127,7 +149,7 @@ message BatchGetAssetsHistoryRequest { // not set, the snapshot of the assets at end_time will be returned. The // returned results contain all temporal assets whose time window overlap with // read_time_window. - TimeWindow read_time_window = 4; + TimeWindow read_time_window = 4 [(google.api.field_behavior) = OPTIONAL]; } // Batch get assets history response. @@ -142,6 +164,12 @@ message OutputConfig { oneof destination { // Destination on Cloud Storage. GcsDestination gcs_destination = 1; + + // Destination on BigQuery. The output table stores the fields in asset + // proto as columns in BigQuery. The resource/iam_policy field is converted + // to a record with each field to a column, except metadata to a single JSON + // string. 
+ BigQueryDestination bigquery_destination = 2; } } @@ -155,9 +183,40 @@ message GcsDestination { // Metadata](https://cloud.google.com/storage/docs/viewing-editing-metadata) // for more information. string uri = 1; + + // The uri prefix of all generated Cloud Storage objects. For example: + // "gs://bucket_name/object_name_prefix". Each object uri is in format: + // "gs://bucket_name/object_name_prefix// and only + // contains assets for that type. starts from 0. For example: + // "gs://bucket_name/object_name_prefix/compute.googleapis.com/Disk/0" is + // the first shard of output objects containing all + // compute.googleapis.com/Disk assets. An INVALID_ARGUMENT error will be + // returned if file with the same name "gs://bucket_name/object_name_prefix" + // already exists. + string uri_prefix = 2; } } +// A BigQuery destination. +message BigQueryDestination { + // Required. The BigQuery dataset in format + // "projects/projectId/datasets/datasetId", to which the snapshot result + // should be exported. If this dataset does not exist, the export call returns + // an error. + string dataset = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The BigQuery table to which the snapshot result should be + // written. If this table does not exist, a new table with the given name + // will be created. + string table = 2 [(google.api.field_behavior) = REQUIRED]; + + // If the destination table already exists and this flag is `TRUE`, the + // table will be overwritten by the contents of assets snapshot. If the flag + // is not set and the destination table already exists, the export call + // returns an error. + bool force = 3; +} + // Asset content type. enum ContentType { // Unspecified content type. @@ -168,4 +227,10 @@ enum ContentType { // The actual IAM policy set on a resource. IAM_POLICY = 2; + + // The Cloud Organization Policy set on an asset. + ORG_POLICY = 4; + + // The Cloud Access context mananger Policy set on an asset. 
+ ACCESS_POLICY = 5; } diff --git a/asset/google/cloud/asset_v1/proto/asset_service_pb2.py b/asset/google/cloud/asset_v1/proto/asset_service_pb2.py index 869d4dc3fee1..b382992b04c8 100644 --- a/asset/google/cloud/asset_v1/proto/asset_service_pb2.py +++ b/asset/google/cloud/asset_v1/proto/asset_service_pb2.py @@ -17,12 +17,17 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.asset_v1.proto import ( assets_pb2 as google_dot_cloud_dot_asset__v1_dot_proto_dot_assets__pb2, ) from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -34,12 +39,17 @@ "\n\031com.google.cloud.asset.v1B\021AssetServiceProtoP\001Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\252\002\025Google.Cloud.Asset.V1\312\002\025Google\\Cloud\\Asset\\V1" ), serialized_pb=_b( - '\n/google/cloud/asset_v1/proto/asset_service.proto\x12\x15google.cloud.asset.v1\x1a\x1cgoogle/api/annotations.proto\x1a(google/cloud/asset_v1/proto/assets.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xdf\x01\n\x13\x45xportAssetsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12-\n\tread_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x13\n\x0b\x61sset_types\x18\x03 \x03(\t\x12\x38\n\x0c\x63ontent_type\x18\x04 \x01(\x0e\x32".google.cloud.asset.v1.ContentType\x12:\n\routput_config\x18\x05 \x01(\x0b\x32#.google.cloud.asset.v1.OutputConfig"\x81\x01\n\x14\x45xportAssetsResponse\x12-\n\tread_time\x18\x01 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\routput_config\x18\x02 \x01(\x0b\x32#.google.cloud.asset.v1.OutputConfig"\xba\x01\n\x1c\x42\x61tchGetAssetsHistoryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x13\n\x0b\x61sset_names\x18\x02 \x03(\t\x12\x38\n\x0c\x63ontent_type\x18\x03 \x01(\x0e\x32".google.cloud.asset.v1.ContentType\x12;\n\x10read_time_window\x18\x04 \x01(\x0b\x32!.google.cloud.asset.v1.TimeWindow"U\n\x1d\x42\x61tchGetAssetsHistoryResponse\x12\x34\n\x06\x61ssets\x18\x01 \x03(\x0b\x32$.google.cloud.asset.v1.TemporalAsset"_\n\x0cOutputConfig\x12@\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32%.google.cloud.asset.v1.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"-\n\x0eGcsDestination\x12\r\n\x03uri\x18\x01 \x01(\tH\x00\x42\x0c\n\nobject_uri*I\n\x0b\x43ontentType\x12\x1c\n\x18\x43ONTENT_TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08RESOURCE\x10\x01\x12\x0e\n\nIAM_POLICY\x10\x02\x32\xc9\x02\n\x0c\x41ssetService\x12\x83\x01\n\x0c\x45xportAssets\x12*.google.cloud.asset.v1.ExportAssetsRequest\x1a\x1d.google.longrunning.Operation"(\x82\xd3\xe4\x93\x02""\x1d/v1/{parent=*/*}:exportAssets:\x01*\x12\xb2\x01\n\x15\x42\x61tchGetAssetsHistory\x12\x33.google.cloud.asset.v1.BatchGetAssetsHistoryRequest\x1a\x34.google.cloud.asset.v1.BatchGetAssetsHistoryResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{parent=*/*}:batchGetAssetsHistoryB\x9c\x01\n\x19\x63om.google.cloud.asset.v1B\x11\x41ssetServiceProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\xaa\x02\x15Google.Cloud.Asset.V1\xca\x02\x15Google\\Cloud\\Asset\\V1b\x06proto3' + '\n/google/cloud/asset_v1/proto/asset_service.proto\x12\x15google.cloud.asset.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a(google/cloud/asset_v1/proto/assets.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x8d\x02\n\x13\x45xportAssetsRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudasset.googleapis.com/Asset\x12-\n\tread_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x13\n\x0b\x61sset_types\x18\x03 \x03(\t\x12\x38\n\x0c\x63ontent_type\x18\x04 \x01(\x0e\x32".google.cloud.asset.v1.ContentType\x12?\n\routput_config\x18\x05 \x01(\x0b\x32#.google.cloud.asset.v1.OutputConfigB\x03\xe0\x41\x02"\x81\x01\n\x14\x45xportAssetsResponse\x12-\n\tread_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12:\n\routput_config\x18\x02 \x01(\x0b\x32#.google.cloud.asset.v1.OutputConfig"\xed\x01\n\x1c\x42\x61tchGetAssetsHistoryRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudasset.googleapis.com/Asset\x12\x13\n\x0b\x61sset_names\x18\x02 \x03(\t\x12=\n\x0c\x63ontent_type\x18\x03 \x01(\x0e\x32".google.cloud.asset.v1.ContentTypeB\x03\xe0\x41\x01\x12@\n\x10read_time_window\x18\x04 \x01(\x0b\x32!.google.cloud.asset.v1.TimeWindowB\x03\xe0\x41\x01"U\n\x1d\x42\x61tchGetAssetsHistoryResponse\x12\x34\n\x06\x61ssets\x18\x01 \x03(\x0b\x32$.google.cloud.asset.v1.TemporalAsset"\xab\x01\n\x0cOutputConfig\x12@\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32%.google.cloud.asset.v1.GcsDestinationH\x00\x12J\n\x14\x62igquery_destination\x18\x02 \x01(\x0b\x32*.google.cloud.asset.v1.BigQueryDestinationH\x00\x42\r\n\x0b\x64\x65stination"C\n\x0eGcsDestination\x12\r\n\x03uri\x18\x01 \x01(\tH\x00\x12\x14\n\nuri_prefix\x18\x02 \x01(\tH\x00\x42\x0c\n\nobject_uri"N\n\x13\x42igQueryDestination\x12\x14\n\x07\x64\x61taset\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\x05table\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\r\n\x05\x66orce\x18\x03 
\x01(\x08*l\n\x0b\x43ontentType\x12\x1c\n\x18\x43ONTENT_TYPE_UNSPECIFIED\x10\x00\x12\x0c\n\x08RESOURCE\x10\x01\x12\x0e\n\nIAM_POLICY\x10\x02\x12\x0e\n\nORG_POLICY\x10\x04\x12\x11\n\rACCESS_POLICY\x10\x05\x32\xf3\x03\n\x0c\x41ssetService\x12\xde\x01\n\x0c\x45xportAssets\x12*.google.cloud.asset.v1.ExportAssetsRequest\x1a\x1d.google.longrunning.Operation"\x82\x01\x82\xd3\xe4\x93\x02""\x1d/v1/{parent=*/*}:exportAssets:\x01*\xca\x41W\n*google.cloud.asset.v1.ExportAssetsResponse\x12)google.cloud.asset.v1.ExportAssetsRequest\x12\xb2\x01\n\x15\x42\x61tchGetAssetsHistory\x12\x33.google.cloud.asset.v1.BatchGetAssetsHistoryRequest\x1a\x34.google.cloud.asset.v1.BatchGetAssetsHistoryResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{parent=*/*}:batchGetAssetsHistory\x1aM\xca\x41\x19\x63loudasset.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x9c\x01\n\x19\x63om.google.cloud.asset.v1B\x11\x41ssetServiceProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\xaa\x02\x15Google.Cloud.Asset.V1\xca\x02\x15Google\\Cloud\\Asset\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_asset__v1_dot_proto_dot_assets__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], ) @@ -63,11 +73,17 @@ _descriptor.EnumValueDescriptor( name="IAM_POLICY", index=2, number=2, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="ORG_POLICY", index=3, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ACCESS_POLICY", index=4, number=5, serialized_options=None, type=None + ), ], containing_type=None, serialized_options=None, - serialized_start=994, 
- serialized_end=1067, + serialized_start=1418, + serialized_end=1526, ) _sym_db.RegisterEnumDescriptor(_CONTENTTYPE) @@ -75,6 +91,8 @@ CONTENT_TYPE_UNSPECIFIED = 0 RESOURCE = 1 IAM_POLICY = 2 +ORG_POLICY = 4 +ACCESS_POLICY = 5 _EXPORTASSETSREQUEST = _descriptor.Descriptor( @@ -99,7 +117,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A!\022\037cloudasset.googleapis.com/Asset" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -171,7 +191,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -183,8 +203,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=217, - serialized_end=440, + serialized_start=365, + serialized_end=634, ) @@ -240,8 +260,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=443, - serialized_end=572, + serialized_start=637, + serialized_end=766, ) @@ -267,7 +287,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A!\022\037cloudasset.googleapis.com/Asset" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -303,7 +325,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -321,7 +343,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -333,8 +355,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=575, - serialized_end=761, + serialized_start=769, + serialized_end=1006, ) @@ -372,8 +394,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=763, - serialized_end=848, + serialized_start=1008, + serialized_end=1093, ) @@ -401,7 +423,25 @@ 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), + _descriptor.FieldDescriptor( + name="bigquery_destination", + full_name="google.cloud.asset.v1.OutputConfig.bigquery_destination", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -419,8 +459,8 @@ fields=[], ) ], - serialized_start=850, - serialized_end=945, + serialized_start=1096, + serialized_end=1267, ) @@ -448,7 +488,25 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), + _descriptor.FieldDescriptor( + name="uri_prefix", + full_name="google.cloud.asset.v1.GcsDestination.uri_prefix", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -466,8 +524,83 @@ fields=[], ) ], - serialized_start=947, - serialized_end=992, + serialized_start=1269, + serialized_end=1336, +) + + +_BIGQUERYDESTINATION = _descriptor.Descriptor( + name="BigQueryDestination", + full_name="google.cloud.asset.v1.BigQueryDestination", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="dataset", + full_name="google.cloud.asset.v1.BigQueryDestination.dataset", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="table", + 
full_name="google.cloud.asset.v1.BigQueryDestination.table", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="force", + full_name="google.cloud.asset.v1.BigQueryDestination.force", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1338, + serialized_end=1416, ) _EXPORTASSETSREQUEST.fields_by_name[ @@ -487,18 +620,31 @@ "assets" ].message_type = google_dot_cloud_dot_asset__v1_dot_proto_dot_assets__pb2._TEMPORALASSET _OUTPUTCONFIG.fields_by_name["gcs_destination"].message_type = _GCSDESTINATION +_OUTPUTCONFIG.fields_by_name["bigquery_destination"].message_type = _BIGQUERYDESTINATION _OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( _OUTPUTCONFIG.fields_by_name["gcs_destination"] ) _OUTPUTCONFIG.fields_by_name[ "gcs_destination" ].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] +_OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( + _OUTPUTCONFIG.fields_by_name["bigquery_destination"] +) +_OUTPUTCONFIG.fields_by_name[ + "bigquery_destination" +].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] _GCSDESTINATION.oneofs_by_name["object_uri"].fields.append( _GCSDESTINATION.fields_by_name["uri"] ) _GCSDESTINATION.fields_by_name["uri"].containing_oneof = _GCSDESTINATION.oneofs_by_name[ "object_uri" ] 
+_GCSDESTINATION.oneofs_by_name["object_uri"].fields.append( + _GCSDESTINATION.fields_by_name["uri_prefix"] +) +_GCSDESTINATION.fields_by_name[ + "uri_prefix" +].containing_oneof = _GCSDESTINATION.oneofs_by_name["object_uri"] DESCRIPTOR.message_types_by_name["ExportAssetsRequest"] = _EXPORTASSETSREQUEST DESCRIPTOR.message_types_by_name["ExportAssetsResponse"] = _EXPORTASSETSRESPONSE DESCRIPTOR.message_types_by_name[ @@ -509,6 +655,7 @@ ] = _BATCHGETASSETSHISTORYRESPONSE DESCRIPTOR.message_types_by_name["OutputConfig"] = _OUTPUTCONFIG DESCRIPTOR.message_types_by_name["GcsDestination"] = _GCSDESTINATION +DESCRIPTOR.message_types_by_name["BigQueryDestination"] = _BIGQUERYDESTINATION DESCRIPTOR.enum_types_by_name["ContentType"] = _CONTENTTYPE _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -539,9 +686,8 @@ A list of asset types of which to take a snapshot for. For example: "compute.googleapis.com/Disk". If specified, only matching assets will be returned. See `Introduction to Cloud - Asset Inventory `__ for all - supported asset types. + Asset Inventory `__ for all supported asset types. content_type: Asset content type. If not specified, no content but the asset name will be returned. @@ -600,13 +746,12 @@ te.googleapis.com/projects/my_project_123/zones/zone1/instance s/instance1``. See `Resource Names `__ and `Resource - Name Format `__ - for more info. The request becomes a no-op if the asset name - list is empty, and the max size of the asset name list is 100 - in one request. + Name Format `__ for more info. The + request becomes a no-op if the asset name list is empty, and + the max size of the asset name list is 100 in one request. content_type: - Required. The content type. + Optional. The content type. read_time_window: Optional. The time window for the asset history. Both start\_time and end\_time are optional and if set, it must be @@ -653,6 +798,11 @@ Asset export destination. gcs_destination: Destination on Cloud Storage. 
+ bigquery_destination: + Destination on BigQuery. The output table stores the fields in + asset proto as columns in BigQuery. The resource/iam\_policy + field is converted to a record with each field to a column, + except metadata to a single JSON string. """, # @@protoc_insertion_point(class_scope:google.cloud.asset.v1.OutputConfig) ), @@ -677,23 +827,72 @@ See `Viewing and Editing Object Metadata `__ for more information. + uri_prefix: + The uri prefix of all generated Cloud Storage objects. For + example: "gs://bucket\_name/object\_name\_prefix". Each object + uri is in format: "gs://bucket\_name/object\_name\_prefix// + and only contains assets for that type. starts from 0. For + example: "gs://bucket\_name/object\_name\_prefix/compute.googl + eapis.com/Disk/0" is the first shard of output objects + containing all compute.googleapis.com/Disk assets. An + INVALID\_ARGUMENT error will be returned if file with the same + name "gs://bucket\_name/object\_name\_prefix" already exists. """, # @@protoc_insertion_point(class_scope:google.cloud.asset.v1.GcsDestination) ), ) _sym_db.RegisterMessage(GcsDestination) +BigQueryDestination = _reflection.GeneratedProtocolMessageType( + "BigQueryDestination", + (_message.Message,), + dict( + DESCRIPTOR=_BIGQUERYDESTINATION, + __module__="google.cloud.asset_v1.proto.asset_service_pb2", + __doc__="""A BigQuery destination. + + + Attributes: + dataset: + Required. The BigQuery dataset in format + "projects/projectId/datasets/datasetId", to which the snapshot + result should be exported. If this dataset does not exist, the + export call returns an error. + table: + Required. The BigQuery table to which the snapshot result + should be written. If this table does not exist, a new table + with the given name will be created. + force: + If the destination table already exists and this flag is + ``TRUE``, the table will be overwritten by the contents of + assets snapshot. 
If the flag is not set and the destination + table already exists, the export call returns an error. + """, + # @@protoc_insertion_point(class_scope:google.cloud.asset.v1.BigQueryDestination) + ), +) +_sym_db.RegisterMessage(BigQueryDestination) + DESCRIPTOR._options = None +_EXPORTASSETSREQUEST.fields_by_name["parent"]._options = None +_EXPORTASSETSREQUEST.fields_by_name["output_config"]._options = None +_BATCHGETASSETSHISTORYREQUEST.fields_by_name["parent"]._options = None +_BATCHGETASSETSHISTORYREQUEST.fields_by_name["content_type"]._options = None +_BATCHGETASSETSHISTORYREQUEST.fields_by_name["read_time_window"]._options = None +_BIGQUERYDESTINATION.fields_by_name["dataset"]._options = None +_BIGQUERYDESTINATION.fields_by_name["table"]._options = None _ASSETSERVICE = _descriptor.ServiceDescriptor( name="AssetService", full_name="google.cloud.asset.v1.AssetService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1070, - serialized_end=1399, + serialized_options=_b( + "\312A\031cloudasset.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1529, + serialized_end=2028, methods=[ _descriptor.MethodDescriptor( name="ExportAssets", @@ -703,7 +902,7 @@ input_type=_EXPORTASSETSREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002""\035/v1/{parent=*/*}:exportAssets:\001*' + '\202\323\344\223\002""\035/v1/{parent=*/*}:exportAssets:\001*\312AW\n*google.cloud.asset.v1.ExportAssetsResponse\022)google.cloud.asset.v1.ExportAssetsRequest' ), ), _descriptor.MethodDescriptor( diff --git a/asset/google/cloud/asset_v1/proto/assets.proto b/asset/google/cloud/asset_v1/proto/assets.proto index f6a8108c0bd2..e689b761822c 100644 --- a/asset/google/cloud/asset_v1/proto/assets.proto +++ b/asset/google/cloud/asset_v1/proto/assets.proto @@ -18,11 +18,13 @@ syntax = "proto3"; package google.cloud.asset.v1; import "google/api/annotations.proto"; +import 
"google/api/resource.proto"; import "google/iam/v1/policy.proto"; import "google/protobuf/any.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; +option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Asset.V1"; option go_package = "google.golang.org/genproto/googleapis/cloud/asset/v1;asset"; option java_multiple_files = true; @@ -30,7 +32,6 @@ option java_outer_classname = "AssetProto"; option java_package = "com.google.cloud.asset.v1"; option php_namespace = "Google\\Cloud\\Asset\\V1"; - // Temporal asset. In addition to the asset, the temporal asset includes the // status of the asset and valid from and to time of it. message TemporalAsset { @@ -57,6 +58,11 @@ message TimeWindow { // Cloud asset. This includes all Google Cloud Platform resources, // Cloud IAM policies, and other non-GCP assets. message Asset { + option (google.api.resource) = { + type: "cloudasset.googleapis.com/Asset" + pattern: "*" + }; + // The full name of the asset. For example: // `//compute.googleapis.com/projects/my_project_123/zones/zone1/instances/instance1`. 
// See [Resource diff --git a/asset/google/cloud/asset_v1/proto/assets_pb2.py b/asset/google/cloud/asset_v1/proto/assets_pb2.py index 99fba5cde79d..0b37f61584ae 100644 --- a/asset/google/cloud/asset_v1/proto/assets_pb2.py +++ b/asset/google/cloud/asset_v1/proto/assets_pb2.py @@ -16,6 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 @@ -27,13 +28,14 @@ package="google.cloud.asset.v1", syntax="proto3", serialized_options=_b( - "\n\031com.google.cloud.asset.v1B\nAssetProtoP\001Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\252\002\025Google.Cloud.Asset.V1\312\002\025Google\\Cloud\\Asset\\V1" + "\n\031com.google.cloud.asset.v1B\nAssetProtoP\001Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\370\001\001\252\002\025Google.Cloud.Asset.V1\312\002\025Google\\Cloud\\Asset\\V1" ), serialized_pb=_b( - '\n(google/cloud/asset_v1/proto/assets.proto\x12\x15google.cloud.asset.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x80\x01\n\rTemporalAsset\x12\x31\n\x06window\x18\x01 \x01(\x0b\x32!.google.cloud.asset.v1.TimeWindow\x12\x0f\n\x07\x64\x65leted\x18\x02 \x01(\x08\x12+\n\x05\x61sset\x18\x03 \x01(\x0b\x32\x1c.google.cloud.asset.v1.Asset"j\n\nTimeWindow\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x87\x01\n\x05\x41sset\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nasset_type\x18\x02 \x01(\t\x12\x31\n\x08resource\x18\x03 
\x01(\x0b\x32\x1f.google.cloud.asset.v1.Resource\x12)\n\niam_policy\x18\x04 \x01(\x0b\x32\x15.google.iam.v1.Policy"\xa0\x01\n\x08Resource\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x1e\n\x16\x64iscovery_document_uri\x18\x02 \x01(\t\x12\x16\n\x0e\x64iscovery_name\x18\x03 \x01(\t\x12\x14\n\x0cresource_url\x18\x04 \x01(\t\x12\x0e\n\x06parent\x18\x05 \x01(\t\x12%\n\x04\x64\x61ta\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructB\x95\x01\n\x19\x63om.google.cloud.asset.v1B\nAssetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\xaa\x02\x15Google.Cloud.Asset.V1\xca\x02\x15Google\\Cloud\\Asset\\V1b\x06proto3' + '\n(google/cloud/asset_v1/proto/assets.proto\x12\x15google.cloud.asset.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x80\x01\n\rTemporalAsset\x12\x31\n\x06window\x18\x01 \x01(\x0b\x32!.google.cloud.asset.v1.TimeWindow\x12\x0f\n\x07\x64\x65leted\x18\x02 \x01(\x08\x12+\n\x05\x61sset\x18\x03 \x01(\x0b\x32\x1c.google.cloud.asset.v1.Asset"j\n\nTimeWindow\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xb0\x01\n\x05\x41sset\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nasset_type\x18\x02 \x01(\t\x12\x31\n\x08resource\x18\x03 \x01(\x0b\x32\x1f.google.cloud.asset.v1.Resource\x12)\n\niam_policy\x18\x04 \x01(\x0b\x32\x15.google.iam.v1.Policy:\'\xea\x41$\n\x1f\x63loudasset.googleapis.com/Asset\x12\x01*"\xa0\x01\n\x08Resource\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x1e\n\x16\x64iscovery_document_uri\x18\x02 \x01(\t\x12\x16\n\x0e\x64iscovery_name\x18\x03 \x01(\t\x12\x14\n\x0cresource_url\x18\x04 \x01(\t\x12\x0e\n\x06parent\x18\x05 \x01(\t\x12%\n\x04\x64\x61ta\x18\x06 
\x01(\x0b\x32\x17.google.protobuf.StructB\x98\x01\n\x19\x63om.google.cloud.asset.v1B\nAssetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/asset/v1;asset\xf8\x01\x01\xaa\x02\x15Google.Cloud.Asset.V1\xca\x02\x15Google\\Cloud\\Asset\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR, google_dot_protobuf_dot_any__pb2.DESCRIPTOR, google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, @@ -112,8 +114,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=216, - serialized_end=344, + serialized_start=243, + serialized_end=371, ) @@ -169,8 +171,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=346, - serialized_end=452, + serialized_start=373, + serialized_end=479, ) @@ -257,13 +259,13 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b("\352A$\n\037cloudasset.googleapis.com/Asset\022\001*"), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=455, - serialized_end=590, + serialized_start=482, + serialized_end=658, ) @@ -391,8 +393,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=593, - serialized_end=753, + serialized_start=661, + serialized_end=821, ) _TEMPORALASSET.fields_by_name["window"].message_type = _TIMEWINDOW @@ -541,4 +543,5 @@ DESCRIPTOR._options = None +_ASSET._options = None # @@protoc_insertion_point(module_scope) diff --git a/asset/setup.py b/asset/setup.py index a70843c8e1de..583cff983676 100644 --- a/asset/setup.py +++ b/asset/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-asset" description = "Cloud Asset API API client library" -version = "0.4.1" +version = "0.5.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/asset/synth.metadata b/asset/synth.metadata index 2a27fdfa4cf0..630bcf259589 100644 --- a/asset/synth.metadata 
+++ b/asset/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:11:34.528326Z", + "updateTime": "2019-10-29T12:12:19.326302Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/automl/CHANGELOG.md b/automl/CHANGELOG.md index e991a616462e..2534c9441f0e 100644 --- a/automl/CHANGELOG.md +++ b/automl/CHANGELOG.md @@ -4,6 +4,39 @@ [1]: https://pypi.org/project/google-cloud-automl/#history +## 0.7.1 + +10-29-2019 13:45 PDT + + +### Implementation Changes +- Pass credentials to underlying clients in TableClient ([#9491](https://github.com/googleapis/google-cloud-python/pull/9491)) + +## 0.7.0 + +10-04-2019 15:37 PDT + +### Implementation Changes +- Return operation future from `AutoMlClient.create_dataset` (via synth).([#9423](https://github.com/googleapis/google-cloud-python/pull/9423)) + + +### New Features +- Add support for V1 API (via synth). ([#9388](https://github.com/googleapis/google-cloud-python/pull/9388)) +- Add support for passing project to 'GcsClient'. ([#9299](https://github.com/googleapis/google-cloud-python/pull/9299)) + +## 0.6.0 + +09-30-2019 10:40 PDT + +### New Features +- Add 'image_classification_model_deployment_metadata' arg to 'AutoMlClient.deploy_model' (via synth). 
([#9291](https://github.com/googleapis/google-cloud-python/pull/9291)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) + +### Internal / Testing Changes +- Preserve GcsClient, 'pandas' extras in testing (via synth). ([#9179](https://github.com/googleapis/google-cloud-python/pull/9179)) + ## 0.5.0 08-28-2019 14:07 PDT diff --git a/automl/docs/conf.py b/automl/docs/conf.py index d2091505d055..9ac18387deed 100644 --- a/automl/docs/conf.py +++ b/automl/docs/conf.py @@ -318,7 +318,7 @@ u"google-cloud-automl Documentation", author, "google-cloud-automl", - "GAPIC library for the {metadata.shortName} v1beta1 service", + "GAPIC library for the {metadata.shortName} v1 service", "APIs", ) ] @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/automl/docs/gapic/v1/api.rst b/automl/docs/gapic/v1/api.rst new file mode 100644 index 000000000000..757fc1a0f456 --- /dev/null +++ b/automl/docs/gapic/v1/api.rst @@ -0,0 +1,6 @@ +Client for Cloud AutoML API +=========================== + +.. automodule:: google.cloud.automl_v1 + :members: + :inherited-members: \ No newline at end of file diff --git a/automl/docs/gapic/v1/types.rst b/automl/docs/gapic/v1/types.rst new file mode 100644 index 000000000000..5fd25134fc13 --- /dev/null +++ b/automl/docs/gapic/v1/types.rst @@ -0,0 +1,5 @@ +Types for Cloud AutoML API Client +================================= + +.. 
automodule:: google.cloud.automl_v1.types + :members: \ No newline at end of file diff --git a/automl/docs/index.rst b/automl/docs/index.rst index 01f577642cb1..90c2bfd56246 100644 --- a/automl/docs/index.rst +++ b/automl/docs/index.rst @@ -1,8 +1,22 @@ .. include:: README.rst +This package includes clients for multiple versions of the Cloud AutoML API. +By default, you will get ``v1``, the latest stable version. -Api Reference -------------- +v1 API Reference +------------------------ +.. toctree:: + :maxdepth: 2 + + gapic/v1/api + gapic/v1/types + +Previous beta release v1beta1 is provided as well. + +An API and type reference is provided for ``v1beta1``: + +v1beta1 API Reference +---------------------- .. toctree:: :maxdepth: 2 @@ -11,6 +25,7 @@ Api Reference gapic/v1beta1/tables + Changelog --------- diff --git a/automl/google/cloud/automl.py b/automl/google/cloud/automl.py index 77528b3d1dbf..9f96f4f44dc7 100644 --- a/automl/google/cloud/automl.py +++ b/automl/google/cloud/automl.py @@ -17,10 +17,10 @@ from __future__ import absolute_import -from google.cloud.automl_v1beta1 import AutoMlClient -from google.cloud.automl_v1beta1 import PredictionServiceClient -from google.cloud.automl_v1beta1 import enums -from google.cloud.automl_v1beta1 import types +from google.cloud.automl_v1 import AutoMlClient +from google.cloud.automl_v1 import PredictionServiceClient +from google.cloud.automl_v1 import enums +from google.cloud.automl_v1 import types __all__ = ("enums", "types", "AutoMlClient", "PredictionServiceClient") diff --git a/automl/google/cloud/automl_v1/__init__.py b/automl/google/cloud/automl_v1/__init__.py new file mode 100644 index 000000000000..f68180a567ab --- /dev/null +++ b/automl/google/cloud/automl_v1/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from __future__ import absolute_import + +from google.cloud.automl_v1 import types +from google.cloud.automl_v1.gapic import auto_ml_client +from google.cloud.automl_v1.gapic import enums +from google.cloud.automl_v1.gapic import prediction_service_client + + +class AutoMlClient(auto_ml_client.AutoMlClient): + __doc__ = auto_ml_client.AutoMlClient.__doc__ + enums = enums + + +class PredictionServiceClient(prediction_service_client.PredictionServiceClient): + __doc__ = prediction_service_client.PredictionServiceClient.__doc__ + enums = enums + + +__all__ = ("enums", "types", "AutoMlClient", "PredictionServiceClient") diff --git a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/__init__.py b/automl/google/cloud/automl_v1/gapic/__init__.py similarity index 100% rename from logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/__init__.py rename to automl/google/cloud/automl_v1/gapic/__init__.py diff --git a/automl/google/cloud/automl_v1/gapic/auto_ml_client.py b/automl/google/cloud/automl_v1/gapic/auto_ml_client.py new file mode 100644 index 000000000000..eebed1ee3831 --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/auto_ml_client.py @@ -0,0 +1,1514 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Accesses the google.cloud.automl.v1 AutoMl API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.operation +import google.api_core.operations_v1 +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.automl_v1.gapic import auto_ml_client_config +from google.cloud.automl_v1.gapic import enums +from google.cloud.automl_v1.gapic.transports import auto_ml_grpc_transport +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import operations_pb2 as proto_operations_pb2 +from google.cloud.automl_v1.proto import service_pb2 +from google.cloud.automl_v1.proto import service_pb2_grpc +from google.longrunning import operations_pb2 as longrunning_operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl").version + + +class AutoMlClient(object): + """ + AutoML Server API. + + The resource names are assigned by the server. 
The server never reuses + names that it has created after the resources with those names are + deleted. + + An ID of a resource is the last element of the item's resource name. For + ``projects/{project_id}/locations/{location_id}/datasets/{dataset_id}``, + then the id for the item is ``{dataset_id}``. + + Currently the only supported ``location_id`` is "us-central1". + + On any input that is documented to expect a string parameter in + snake\_case or kebab-case, either of those cases is accepted. + """ + + SERVICE_ADDRESS = "automl.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.automl.v1.AutoMl" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AutoMlClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def dataset_path(cls, project, location, dataset): + """Return a fully-qualified dataset string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/datasets/{dataset}", + project=project, + location=location, + dataset=dataset, + ) + + @classmethod + def location_path(cls, project, location): + """Return a fully-qualified location string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}", + project=project, + location=location, + ) + + @classmethod + def model_path(cls, project, location, model): + """Return a fully-qualified model string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}", + project=project, + location=location, + model=model, + ) + + @classmethod + def model_evaluation_path(cls, project, location, model, model_evaluation): + """Return a fully-qualified model_evaluation string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}/modelEvaluations/{model_evaluation}", + project=project, + location=location, + model=model, + model_evaluation=model_evaluation, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.AutoMlGrpcTransport, + Callable[[~.Credentials, type], ~.AutoMlGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. 
Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. 
+ if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = auto_ml_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=auto_ml_grpc_transport.AutoMlGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = auto_ml_grpc_transport.AutoMlGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. 
+ # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def create_dataset( + self, + parent, + dataset, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `dataset`: + >>> dataset = {} + >>> + >>> response = client.create_dataset(parent, dataset) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): The resource name of the project to create the dataset for. + dataset (Union[dict, ~google.cloud.automl_v1.types.Dataset]): The dataset to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Dataset` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. 
+ ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "create_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_dataset, + default_retry=self._method_configs["CreateDataset"].retry, + default_timeout=self._method_configs["CreateDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.CreateDatasetRequest(parent=parent, dataset=dataset) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["create_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + dataset_pb2.Dataset, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def update_dataset( + self, + dataset, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> # TODO: Initialize `dataset`: + >>> dataset = {} + >>> + >>> # TODO: Initialize `update_mask`: + >>> update_mask = {} + >>> + >>> response = client.update_dataset(dataset, update_mask) + + Args: + dataset (Union[dict, ~google.cloud.automl_v1.types.Dataset]): The dataset which replaces the resource on the server. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Dataset` + update_mask (Union[dict, ~google.cloud.automl_v1.types.FieldMask]): Required. 
The update mask applies to the resource. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Dataset` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "update_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "update_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_dataset, + default_retry=self._method_configs["UpdateDataset"].retry, + default_timeout=self._method_configs["UpdateDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.UpdateDatasetRequest( + dataset=dataset, update_mask=update_mask + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("dataset.name", dataset.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_dataset( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> response = client.get_dataset(name) + + Args: + name (str): The resource name of the dataset to retrieve. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Dataset` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. 
+ google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "get_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_dataset, + default_retry=self._method_configs["GetDataset"].retry, + default_timeout=self._method_configs["GetDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetDatasetRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_datasets( + self, + parent, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists datasets in a project. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_datasets(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_datasets(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): The resource name of the project from which to list datasets. + filter_ (str): An expression for filtering the results of the request. + + - ``dataset_metadata`` - for existence of the case (e.g. 
+ image\_classification\_dataset\_metadata:\*). Some examples of using + the filter are: + + - ``translation_dataset_metadata:*`` --> The dataset has + translation\_dataset\_metadata. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.Dataset` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_datasets" not in self._inner_api_calls: + self._inner_api_calls[ + "list_datasets" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_datasets, + default_retry=self._method_configs["ListDatasets"].retry, + default_timeout=self._method_configs["ListDatasets"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListDatasetsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_datasets"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="datasets", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def delete_dataset( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a dataset and all of its contents. Returns empty response in the + ``response`` field when it completes, and ``delete_details`` in the + ``metadata`` field. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> response = client.delete_dataset(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): The resource name of the dataset to delete. 
+ retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_dataset" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_dataset" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_dataset, + default_retry=self._method_configs["DeleteDataset"].retry, + default_timeout=self._method_configs["DeleteDataset"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.DeleteDatasetRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["delete_dataset"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def import_data( + self, + name, + input_config, + retry=google.api_core.gapic_v1.method.DEFAULT, + 
timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Imports data into a dataset. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> # TODO: Initialize `input_config`: + >>> input_config = {} + >>> + >>> response = client.import_data(name, input_config) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. Dataset name. Dataset must already exist. All imported + annotations and examples will be added. + input_config (Union[dict, ~google.cloud.automl_v1.types.InputConfig]): Required. The desired input location and its domain specific semantics, + if any. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.InputConfig` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "import_data" not in self._inner_api_calls: + self._inner_api_calls[ + "import_data" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.import_data, + default_retry=self._method_configs["ImportData"].retry, + default_timeout=self._method_configs["ImportData"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ImportDataRequest(name=name, input_config=input_config) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["import_data"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def export_data( + self, + name, + output_config, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Exports dataset's data to the provided output location. Returns an empty + response in the ``response`` field when it completes. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.dataset_path('[PROJECT]', '[LOCATION]', '[DATASET]') + >>> + >>> # TODO: Initialize `output_config`: + >>> output_config = {} + >>> + >>> response = client.export_data(name, output_config) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. The resource name of the dataset. + output_config (Union[dict, ~google.cloud.automl_v1.types.OutputConfig]): Required. 
The desired output location. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.OutputConfig` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "export_data" not in self._inner_api_calls: + self._inner_api_calls[ + "export_data" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.export_data, + default_retry=self._method_configs["ExportData"].retry, + default_timeout=self._method_configs["ExportData"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ExportDataRequest(name=name, output_config=output_config) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["export_data"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def create_model( + self, + parent, + model, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a model. Returns a Model in the ``response`` field when it + completes. When you create a model, several model evaluations are + created for it: a global evaluation, and one evaluation for each + annotation spec. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `model`: + >>> model = {} + >>> + >>> response = client.create_model(parent, model) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): Resource name of the parent project where the model is being created. 
+ model (Union[dict, ~google.cloud.automl_v1.types.Model]): The model to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Model` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "create_model" not in self._inner_api_calls: + self._inner_api_calls[ + "create_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_model, + default_retry=self._method_configs["CreateModel"].retry, + default_timeout=self._method_configs["CreateModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.CreateModelRequest(parent=parent, model=model) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["create_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + model_pb2.Model, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def get_model( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a model. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> response = client.get_model(name) + + Args: + name (str): Resource name of the model. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Model` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_model" not in self._inner_api_calls: + self._inner_api_calls[ + "get_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_model, + default_retry=self._method_configs["GetModel"].retry, + default_timeout=self._method_configs["GetModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetModelRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def update_model( + self, + model, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Updates a model. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> # TODO: Initialize `model`: + >>> model = {} + >>> + >>> # TODO: Initialize `update_mask`: + >>> update_mask = {} + >>> + >>> response = client.update_model(model, update_mask) + + Args: + model (Union[dict, ~google.cloud.automl_v1.types.Model]): The model which replaces the resource on the server. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.Model` + update_mask (Union[dict, ~google.cloud.automl_v1.types.FieldMask]): Required. The update mask applies to the resource. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.automl_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types.Model` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "update_model" not in self._inner_api_calls: + self._inner_api_calls[ + "update_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_model, + default_retry=self._method_configs["UpdateModel"].retry, + default_timeout=self._method_configs["UpdateModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.UpdateModelRequest(model=model, update_mask=update_mask) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("model.name", model.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["update_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_models( + self, + parent, + filter_=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + 
metadata=None, + ): + """ + Lists models. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_models(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_models(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Resource name of the project, from which to list the models. + filter_ (str): An expression for filtering the results of the request. + + - ``model_metadata`` - for existence of the case (e.g. + video\_classification\_model\_metadata:\*). + + - ``dataset_id`` - for = or !=. Some examples of using the filter are: + + - ``image_classification_model_metadata:*`` --> The model has + image\_classification\_model\_metadata. + + - ``dataset_id=5`` --> The model was created from a dataset with ID 5. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.Model` instances. 
+ You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_models" not in self._inner_api_calls: + self._inner_api_calls[ + "list_models" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_models, + default_retry=self._method_configs["ListModels"].retry, + default_timeout=self._method_configs["ListModels"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListModelsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_models"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="model", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def delete_model( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a model. Returns ``google.protobuf.Empty`` in the ``response`` + field when it completes, and ``delete_details`` in the ``metadata`` + field. 
+ + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> response = client.delete_model(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Resource name of the model being deleted. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.automl_v1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "delete_model" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_model" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_model, + default_retry=self._method_configs["DeleteModel"].retry, + default_timeout=self._method_configs["DeleteModel"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.DeleteModelRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["delete_model"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + empty_pb2.Empty, + metadata_type=proto_operations_pb2.OperationMetadata, + ) + + def get_model_evaluation( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a model evaluation. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> name = client.model_evaluation_path('[PROJECT]', '[LOCATION]', '[MODEL]', '[MODEL_EVALUATION]') + >>> + >>> response = client.get_model_evaluation(name) + + Args: + name (str): Resource name for the model evaluation. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. 
+ + Returns: + A :class:`~google.cloud.automl_v1.types.ModelEvaluation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_model_evaluation" not in self._inner_api_calls: + self._inner_api_calls[ + "get_model_evaluation" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_model_evaluation, + default_retry=self._method_configs["GetModelEvaluation"].retry, + default_timeout=self._method_configs["GetModelEvaluation"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.GetModelEvaluationRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_model_evaluation"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_model_evaluations( + self, + parent, + filter_, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists model evaluations. + + Example: + >>> from google.cloud import automl_v1 + >>> + >>> client = automl_v1.AutoMlClient() + >>> + >>> parent = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') + >>> + >>> # TODO: Initialize `filter_`: + >>> filter_ = '' + >>> + >>> # Iterate over all results + >>> for element in client.list_model_evaluations(parent, filter_): + ... # process element + ... 
pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_model_evaluations(parent, filter_).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Resource name of the model to list the model evaluations for. + If modelId is set as "-", this will list model evaluations from across all + models of the parent location. + filter_ (str): An expression for filtering the results of the request. + + - ``annotation_spec_id`` - for =, != or existence. See example below + for the last. + + Some examples of using the filter are: + + - ``annotation_spec_id!=4`` --> The model evaluation was done for + annotation spec with ID different than 4. + - ``NOT annotation_spec_id:*`` --> The model evaluation was done for + aggregate of all annotation specs. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.automl_v1.types.ModelEvaluation` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. 
+ google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_model_evaluations" not in self._inner_api_calls: + self._inner_api_calls[ + "list_model_evaluations" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_model_evaluations, + default_retry=self._method_configs["ListModelEvaluations"].retry, + default_timeout=self._method_configs["ListModelEvaluations"].timeout, + client_info=self._client_info, + ) + + request = service_pb2.ListModelEvaluationsRequest( + parent=parent, filter=filter_, page_size=page_size + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_model_evaluations"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="model_evaluation", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator diff --git a/automl/google/cloud/automl_v1/gapic/auto_ml_client_config.py b/automl/google/cloud/automl_v1/gapic/auto_ml_client_config.py new file mode 100644 index 000000000000..6822a905d8bf --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/auto_ml_client_config.py @@ -0,0 +1,93 @@ +config = { + "interfaces": { + "google.cloud.automl.v1.AutoMl": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + 
"initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "CreateDataset": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "UpdateDataset": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetDataset": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListDatasets": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteDataset": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ImportData": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ExportData": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "CreateModel": { + "timeout_millis": 20000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetModel": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "UpdateModel": { + "timeout_millis": 5000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ListModels": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteModel": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "GetModelEvaluation": { + "timeout_millis": 5000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "ListModelEvaluations": { + "timeout_millis": 50000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/automl/google/cloud/automl_v1/gapic/enums.py 
b/automl/google/cloud/automl_v1/gapic/enums.py new file mode 100644 index 000000000000..d9c50d5677e5 --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/enums.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class Model(object): + class DeploymentState(enum.IntEnum): + """ + Deployment state of the model. + + Attributes: + DEPLOYMENT_STATE_UNSPECIFIED (int): Should not be used, an un-set enum has this value by default. + DEPLOYED (int): Model is deployed. + UNDEPLOYED (int): Model is not deployed. + """ + + DEPLOYMENT_STATE_UNSPECIFIED = 0 + DEPLOYED = 1 + UNDEPLOYED = 2 diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client.py b/automl/google/cloud/automl_v1/gapic/prediction_service_client.py similarity index 57% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client.py rename to automl/google/cloud/automl_v1/gapic/prediction_service_client.py index 6a22632a434a..274d7cf6db76 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client.py +++ b/automl/google/cloud/automl_v1/gapic/prediction_service_client.py @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Accesses the google.cloud.videointelligence.v1beta1 VideoIntelligenceService API.""" +"""Accesses the google.cloud.automl.v1 PredictionService API.""" import pkg_resources import warnings @@ -24,37 +24,46 @@ import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers -import google.api_core.operation -import google.api_core.operations_v1 +import google.api_core.path_template import grpc -from google.cloud.videointelligence_v1beta1.gapic import enums -from google.cloud.videointelligence_v1beta1.gapic import ( - video_intelligence_service_client_config, -) -from google.cloud.videointelligence_v1beta1.gapic.transports import ( - video_intelligence_service_grpc_transport, -) -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2 -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2_grpc -from google.longrunning import operations_pb2 +from google.cloud.automl_v1.gapic import enums +from google.cloud.automl_v1.gapic import prediction_service_client_config +from google.cloud.automl_v1.gapic.transports import prediction_service_grpc_transport +from google.cloud.automl_v1.proto import data_items_pb2 +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import operations_pb2 as proto_operations_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2_grpc +from google.cloud.automl_v1.proto import service_pb2 +from google.cloud.automl_v1.proto import service_pb2_grpc +from google.longrunning import operations_pb2 as longrunning_operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf 
import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-videointelligence" -).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-automl").version -class VideoIntelligenceServiceClient(object): - """Service that implements Google Cloud Video Intelligence API.""" +class PredictionServiceClient(object): + """ + AutoML Prediction API. - SERVICE_ADDRESS = "videointelligence.googleapis.com:443" + On any input that is documented to expect a string parameter in + snake\_case or kebab-case, either of those cases is accepted. + """ + + SERVICE_ADDRESS = "automl.googleapis.com:443" """The default address of the service.""" # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.cloud.videointelligence.v1beta1.VideoIntelligenceService" + _INTERFACE_NAME = "google.cloud.automl.v1.PredictionService" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -68,7 +77,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - VideoIntelligenceServiceClient: The constructed client. + PredictionServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -76,6 +85,16 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def model_path(cls, project, location, model): + """Return a fully-qualified model string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/models/{model}", + project=project, + location=location, + model=model, + ) + def __init__( self, transport=None, @@ -88,8 +107,8 @@ def __init__( """Constructor. 
Args: - transport (Union[~.VideoIntelligenceServiceGrpcTransport, - Callable[[~.Credentials, type], ~.VideoIntelligenceServiceGrpcTransport]): A transport + transport (Union[~.PredictionServiceGrpcTransport, + Callable[[~.Credentials, type], ~.PredictionServiceGrpcTransport]): A transport instance, responsible for actually making the API calls. The default transport uses the gRPC protocol. This argument may also be a callable which returns a @@ -126,7 +145,7 @@ def __init__( stacklevel=2, ) else: - client_config = video_intelligence_service_client_config.config + client_config = prediction_service_client_config.config if channel: warnings.warn( @@ -151,7 +170,7 @@ def __init__( if callable(transport): self.transport = transport( credentials=credentials, - default_class=video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport, + default_class=prediction_service_grpc_transport.PredictionServiceGrpcTransport, address=api_endpoint, ) else: @@ -162,7 +181,7 @@ def __init__( ) self.transport = transport else: - self.transport = video_intelligence_service_grpc_transport.VideoIntelligenceServiceGrpcTransport( + self.transport = prediction_service_grpc_transport.PredictionServiceGrpcTransport( address=api_endpoint, channel=channel, credentials=credentials ) @@ -189,75 +208,44 @@ def __init__( self._inner_api_calls = {} # Service calls - def annotate_video( + def predict( self, - input_uri, - features, - input_content=None, - video_context=None, - output_uri=None, - location_id=None, + name, + payload, + params=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Performs asynchronous video annotation. Progress and results can be - retrieved through the ``google.longrunning.Operations`` interface. - ``Operation.metadata`` contains ``AnnotateVideoProgress`` (progress). - ``Operation.response`` contains ``AnnotateVideoResponse`` (results). + Perform an online prediction. 
The prediction result will be directly + returned in the response. Available for following ML problems, and their + expected request payloads: + + - Translation - TextSnippet, content up to 25,000 characters, UTF-8 + encoded. Example: - >>> from google.cloud import videointelligence_v1beta1 - >>> from google.cloud.videointelligence_v1beta1 import enums - >>> - >>> client = videointelligence_v1beta1.VideoIntelligenceServiceClient() + >>> from google.cloud import automl_v1 >>> - >>> input_uri = 'gs://cloud-samples-data/video/cat.mp4' - >>> features_element = enums.Feature.LABEL_DETECTION - >>> features = [features_element] + >>> client = automl_v1.PredictionServiceClient() >>> - >>> response = client.annotate_video(input_uri, features) + >>> name = client.model_path('[PROJECT]', '[LOCATION]', '[MODEL]') >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() + >>> # TODO: Initialize `payload`: + >>> payload = {} >>> - >>> response.add_done_callback(callback) - >>> - >>> # Handle metadata. - >>> metadata = response.metadata() + >>> response = client.predict(name, payload) Args: - input_uri (str): Input video location. Currently, only `Google Cloud - Storage `__ URIs are supported, which - must be specified in the following format: ``gs://bucket-id/object-id`` - (other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For - more information, see `Request - URIs `__. A video - URI may include wildcards in ``object-id``, and thus identify multiple - videos. Supported wildcards: '\*' to match 0 or more characters; '?' to - match 1 character. If unset, the input video should be embedded in the - request as ``input_content``. If set, ``input_content`` should be unset. - features (list[~google.cloud.videointelligence_v1beta1.types.Feature]): Requested video annotation features. - input_content (str): The video data bytes. Encoding: base64. If unset, the input video(s) - should be specified via ``input_uri``. 
If set, ``input_uri`` should be - unset. - video_context (Union[dict, ~google.cloud.videointelligence_v1beta1.types.VideoContext]): Additional video context and/or feature-specific parameters. + name (str): Name of the model requested to serve the prediction. + payload (Union[dict, ~google.cloud.automl_v1.types.ExamplePayload]): Required. Payload to perform a prediction on. The payload must match the + problem type that the model was trained to solve. If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.videointelligence_v1beta1.types.VideoContext` - output_uri (str): Optional location where the output (in JSON format) should be stored. - Currently, only `Google Cloud - Storage `__ URIs are supported, which - must be specified in the following format: ``gs://bucket-id/object-id`` - (other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For - more information, see `Request - URIs `__. - location_id (str): Optional cloud region where annotation should take place. Supported - cloud regions: ``us-east1``, ``us-west1``, ``europe-west1``, - ``asia-east1``. If no region is specified, a region will be determined - based on video file location. + message :class:`~google.cloud.automl_v1.types.ExamplePayload` + params (dict[str -> str]): Additional domain-specific parameters, any string must be up to 25000 + characters long. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -268,7 +256,7 @@ def annotate_video( that is provided to the method. Returns: - A :class:`~google.cloud.videointelligence_v1beta1.types._OperationFuture` instance. + A :class:`~google.cloud.automl_v1.types.PredictResponse` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -278,30 +266,32 @@ def annotate_video( ValueError: If the parameters are invalid. 
""" # Wrap the transport method to add retry and timeout logic. - if "annotate_video" not in self._inner_api_calls: + if "predict" not in self._inner_api_calls: self._inner_api_calls[ - "annotate_video" + "predict" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.annotate_video, - default_retry=self._method_configs["AnnotateVideo"].retry, - default_timeout=self._method_configs["AnnotateVideo"].timeout, + self.transport.predict, + default_retry=self._method_configs["Predict"].retry, + default_timeout=self._method_configs["Predict"].timeout, client_info=self._client_info, ) - request = video_intelligence_pb2.AnnotateVideoRequest( - input_uri=input_uri, - features=features, - input_content=input_content, - video_context=video_context, - output_uri=output_uri, - location_id=location_id, + request = prediction_service_pb2.PredictRequest( + name=name, payload=payload, params=params ) - operation = self._inner_api_calls["annotate_video"]( + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["predict"]( request, retry=retry, timeout=timeout, metadata=metadata ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - video_intelligence_pb2.AnnotateVideoResponse, - metadata_type=video_intelligence_pb2.AnnotateVideoProgress, - ) diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client_config.py b/automl/google/cloud/automl_v1/gapic/prediction_service_client_config.py similarity index 55% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client_config.py rename to automl/google/cloud/automl_v1/gapic/prediction_service_client_config.py index 
fdf442f5c941..21fc698d48db 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/video_intelligence_service_client_config.py +++ b/automl/google/cloud/automl_v1/gapic/prediction_service_client_config.py @@ -1,25 +1,25 @@ config = { "interfaces": { - "google.cloud.videointelligence.v1beta1.VideoIntelligenceService": { + "google.cloud.automl.v1.PredictionService": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "non_idempotent": [], }, "retry_params": { "default": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 2.5, - "max_retry_delay_millis": 120000, - "initial_rpc_timeout_millis": 120000, + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 120000, + "max_rpc_timeout_millis": 20000, "total_timeout_millis": 600000, } }, "methods": { - "AnnotateVideo": { + "Predict": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", } }, diff --git a/logging/google/cloud/logging_v2/proto/logging/type/__init__.py b/automl/google/cloud/automl_v1/gapic/transports/__init__.py similarity index 100% rename from logging/google/cloud/logging_v2/proto/logging/type/__init__.py rename to automl/google/cloud/automl_v1/gapic/transports/__init__.py diff --git a/automl/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py b/automl/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py new file mode 100644 index 000000000000..e07d24ffbbe4 --- /dev/null +++ b/automl/google/cloud/automl_v1/gapic/transports/auto_ml_grpc_transport.py @@ -0,0 +1,304 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers +import google.api_core.operations_v1 + +from google.cloud.automl_v1.proto import service_pb2_grpc + + +class AutoMlGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.automl.v1 AutoMl API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="automl.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. 
+ if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = {"auto_ml_stub": service_pb2_grpc.AutoMlStub(channel)} + + # Because this API includes a method that returns a + # long-running operation (proto: google.longrunning.Operation), + # instantiate an LRO client. + self._operations_client = google.api_core.operations_v1.OperationsClient( + channel + ) + + @classmethod + def create_channel( + cls, address="automl.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def create_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.create_dataset`. + + Creates a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["auto_ml_stub"].CreateDataset + + @property + def update_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.update_dataset`. + + Updates a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].UpdateDataset + + @property + def get_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_dataset`. + + Gets a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetDataset + + @property + def list_datasets(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_datasets`. + + Lists datasets in a project. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ListDatasets + + @property + def delete_dataset(self): + """Return the gRPC stub for :meth:`AutoMlClient.delete_dataset`. + + Deletes a dataset and all of its contents. Returns empty response in the + ``response`` field when it completes, and ``delete_details`` in the + ``metadata`` field. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].DeleteDataset + + @property + def import_data(self): + """Return the gRPC stub for :meth:`AutoMlClient.import_data`. + + Imports data into a dataset. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ImportData + + @property + def export_data(self): + """Return the gRPC stub for :meth:`AutoMlClient.export_data`. 
+ + Exports dataset's data to the provided output location. Returns an empty + response in the ``response`` field when it completes. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ExportData + + @property + def create_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.create_model`. + + Creates a model. Returns a Model in the ``response`` field when it + completes. When you create a model, several model evaluations are + created for it: a global evaluation, and one evaluation for each + annotation spec. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].CreateModel + + @property + def get_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_model`. + + Gets a model. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetModel + + @property + def update_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.update_model`. + + Updates a model. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].UpdateModel + + @property + def list_models(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_models`. + + Lists models. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ListModels + + @property + def delete_model(self): + """Return the gRPC stub for :meth:`AutoMlClient.delete_model`. + + Deletes a model. 
Returns ``google.protobuf.Empty`` in the ``response`` + field when it completes, and ``delete_details`` in the ``metadata`` + field. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].DeleteModel + + @property + def get_model_evaluation(self): + """Return the gRPC stub for :meth:`AutoMlClient.get_model_evaluation`. + + Gets a model evaluation. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].GetModelEvaluation + + @property + def list_model_evaluations(self): + """Return the gRPC stub for :meth:`AutoMlClient.list_model_evaluations`. + + Lists model evaluations. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["auto_ml_stub"].ListModelEvaluations diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/video_intelligence_service_grpc_transport.py b/automl/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py similarity index 74% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/video_intelligence_service_grpc_transport.py rename to automl/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py index 9eaba1a970b3..9fa5a6f8a12e 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/video_intelligence_service_grpc_transport.py +++ b/automl/google/cloud/automl_v1/gapic/transports/prediction_service_grpc_transport.py @@ -16,14 +16,13 @@ import google.api_core.grpc_helpers -import google.api_core.operations_v1 -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2_grpc +from google.cloud.automl_v1.proto import 
prediction_service_pb2_grpc -class VideoIntelligenceServiceGrpcTransport(object): +class PredictionServiceGrpcTransport(object): """gRPC transport class providing stubs for - google.cloud.videointelligence.v1beta1 VideoIntelligenceService API. + google.cloud.automl.v1 PredictionService API. The transport provides access to the raw gRPC stubs, which can be used to take advantage of advanced @@ -35,10 +34,7 @@ class VideoIntelligenceServiceGrpcTransport(object): _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) def __init__( - self, - channel=None, - credentials=None, - address="videointelligence.googleapis.com:443", + self, channel=None, credentials=None, address="automl.googleapis.com:443" ): """Instantiate the transport class. @@ -76,21 +72,14 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - "video_intelligence_service_stub": video_intelligence_pb2_grpc.VideoIntelligenceServiceStub( + "prediction_service_stub": prediction_service_pb2_grpc.PredictionServiceStub( channel ) } - # Because this API includes a method that returns a - # long-running operation (proto: google.longrunning.Operation), - # instantiate an LRO client. - self._operations_client = google.api_core.operations_v1.OperationsClient( - channel - ) - @classmethod def create_channel( - cls, address="videointelligence.googleapis.com:443", credentials=None, **kwargs + cls, address="automl.googleapis.com:443", credentials=None, **kwargs ): """Create and return a gRPC channel object. @@ -121,17 +110,19 @@ def channel(self): return self._channel @property - def annotate_video(self): - """Return the gRPC stub for :meth:`VideoIntelligenceServiceClient.annotate_video`. + def predict(self): + """Return the gRPC stub for :meth:`PredictionServiceClient.predict`. + + Perform an online prediction. The prediction result will be directly + returned in the response. 
Available for following ML problems, and their + expected request payloads: - Performs asynchronous video annotation. Progress and results can be - retrieved through the ``google.longrunning.Operations`` interface. - ``Operation.metadata`` contains ``AnnotateVideoProgress`` (progress). - ``Operation.response`` contains ``AnnotateVideoResponse`` (results). + - Translation - TextSnippet, content up to 25,000 characters, UTF-8 + encoded. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. """ - return self._stubs["video_intelligence_service_stub"].AnnotateVideo + return self._stubs["prediction_service_stub"].Predict diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/__init__.py b/automl/google/cloud/automl_v1/proto/__init__.py similarity index 100% rename from oslogin/google/cloud/oslogin_v1/proto/oslogin/common/__init__.py rename to automl/google/cloud/automl_v1/proto/__init__.py diff --git a/automl/google/cloud/automl_v1/proto/annotation_payload.proto b/automl/google/cloud/automl_v1/proto/annotation_payload.proto new file mode 100644 index 000000000000..9469c2618a49 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/annotation_payload.proto @@ -0,0 +1,39 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.automl.v1; + +import "google/cloud/automl/v1/translation.proto"; +import "google/protobuf/any.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl"; +option csharp_namespace = "Google.Cloud.AutoML.V1"; +option java_multiple_files = true; +option java_package = "com.google.cloud.automl.v1"; +option php_namespace = "Google\\Cloud\\AutoML\\V1"; +option ruby_package = "Google::Cloud::AutoML::V1"; + +// Contains annotation information that is relevant to AutoML. +message AnnotationPayload { + // Output only . Additional information about the annotation + // specific to the AutoML domain. + oneof detail { + // Annotation details for translation. + TranslationAnnotation translation = 2; + } +} diff --git a/automl/google/cloud/automl_v1/proto/annotation_payload_pb2.py b/automl/google/cloud/automl_v1/proto/annotation_payload_pb2.py new file mode 100644 index 000000000000..9f027e70e315 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/annotation_payload_pb2.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/automl_v1/proto/annotation_payload.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/annotation_payload.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z labels = 39; +} diff --git a/automl/google/cloud/automl_v1/proto/dataset_pb2.py b/automl/google/cloud/automl_v1/proto/dataset_pb2.py new file mode 100644 index 000000000000..95d16ad188e8 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/dataset_pb2.py @@ -0,0 +1,352 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/automl_v1/proto/dataset.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/dataset.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z params = 2; +} + +// * For Translation: +// CSV file `translation.csv`, with each line in format: +// ML_USE,GCS_FILE_PATH +// GCS_FILE_PATH leads to a .TSV file which describes examples that have +// given ML_USE, using the following row format per line: +// TEXT_SNIPPET (in source language) \t TEXT_SNIPPET (in target +// language) +// +// `export_data__` +// where will be made +// BigQuery-dataset-name compatible (e.g. most special characters will +// become underscores), and timestamp will be in +// YYYY_MM_DDThh_mm_ss_sssZ "based on ISO-8601" format. In that +// dataset a new table called `primary_table` will be created, and +// filled with precisely the same data as this obtained on import. +message OutputConfig { + // Required. The destination of the output. + oneof destination { + // The Google Cloud Storage location where the output is to be written to. 
+ // For Image Object Detection, Text Extraction, Video Classification and + // Tables, in the given directory a new directory will be created with name: + // export_data-- where + // timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All export + // output will be written into that directory. + GcsDestination gcs_destination = 1; + } +} + +// The Google Cloud Storage location for the input content. +message GcsSource { + // Required. Google Cloud Storage URIs to input files, up to 2000 characters + // long. Accepted forms: + // * Full object path, e.g. gs://bucket/directory/object.csv + repeated string input_uris = 1; +} + +// The Google Cloud Storage location where the output is to be written to. +message GcsDestination { + // Required. Google Cloud Storage URI to output directory, up to 2000 + // characters long. + // Accepted forms: + // * Prefix path: gs://bucket/directory + // The requesting user must have write permission to the bucket. + // The directory is created if it doesn't exist. + string output_uri_prefix = 1; +} diff --git a/automl/google/cloud/automl_v1/proto/io_pb2.py b/automl/google/cloud/automl_v1/proto/io_pb2.py new file mode 100644 index 000000000000..6413e9cb34a7 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/io_pb2.py @@ -0,0 +1,437 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/automl_v1/proto/io.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/io.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z_`` + where will be made BigQuery-dataset-name compatible (e.g. most special + characters will become underscores), and timestamp will be in + YYYY\_MM\_DDThh\_mm\_ss\_sssZ "based on ISO-8601" format. In that + dataset a new table called ``primary_table`` will be created, and filled + with precisely the same data as this obtained on import. + + + Attributes: + destination: + Required. The destination of the output. + gcs_destination: + The Google Cloud Storage location where the output is to be + written to. For Image Object Detection, Text Extraction, Video + Classification and Tables, in the given directory a new + directory will be created with name: export\_data-- where + timestamp is in YYYY-MM-DDThh:mm:ss.sssZ ISO-8601 format. All + export output will be written into that directory. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.OutputConfig) + ), +) +_sym_db.RegisterMessage(OutputConfig) + +GcsSource = _reflection.GeneratedProtocolMessageType( + "GcsSource", + (_message.Message,), + dict( + DESCRIPTOR=_GCSSOURCE, + __module__="google.cloud.automl_v1.proto.io_pb2", + __doc__="""The Google Cloud Storage location for the input content. + + + Attributes: + input_uris: + Required. 
Google Cloud Storage URIs to input files, up to 2000 + characters long. Accepted forms: \* Full object path, e.g. + gs://bucket/directory/object.csv + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GcsSource) + ), +) +_sym_db.RegisterMessage(GcsSource) + +GcsDestination = _reflection.GeneratedProtocolMessageType( + "GcsDestination", + (_message.Message,), + dict( + DESCRIPTOR=_GCSDESTINATION, + __module__="google.cloud.automl_v1.proto.io_pb2", + __doc__="""The Google Cloud Storage location where the output is to be written to. + + + Attributes: + output_uri_prefix: + Required. Google Cloud Storage URI to output directory, up to + 2000 characters long. Accepted forms: \* Prefix path: + gs://bucket/directory The requesting user must have write + permission to the bucket. The directory is created if it + doesn't exist. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GcsDestination) + ), +) +_sym_db.RegisterMessage(GcsDestination) + + +DESCRIPTOR._options = None +_INPUTCONFIG_PARAMSENTRY._options = None +# @@protoc_insertion_point(module_scope) diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/common_pb2_grpc.py b/automl/google/cloud/automl_v1/proto/io_pb2_grpc.py similarity index 100% rename from oslogin/google/cloud/oslogin_v1/proto/oslogin/common/common_pb2_grpc.py rename to automl/google/cloud/automl_v1/proto/io_pb2_grpc.py diff --git a/automl/google/cloud/automl_v1/proto/model.proto b/automl/google/cloud/automl_v1/proto/model.proto new file mode 100644 index 000000000000..5f820b42001e --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/model.proto @@ -0,0 +1,86 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.automl.v1; + +import "google/cloud/automl/v1/translation.proto"; +import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl"; +option csharp_namespace = "Google.Cloud.AutoML.V1"; +option java_multiple_files = true; +option java_package = "com.google.cloud.automl.v1"; +option php_namespace = "Google\\Cloud\\AutoML\\V1"; +option ruby_package = "Google::Cloud::AutoML::V1"; + +// API proto representing a trained machine learning model. +message Model { + // Deployment state of the model. + enum DeploymentState { + // Should not be used, an un-set enum has this value by default. + DEPLOYMENT_STATE_UNSPECIFIED = 0; + + // Model is deployed. + DEPLOYED = 1; + + // Model is not deployed. + UNDEPLOYED = 2; + } + + // Required. + // The model metadata that is specific to the problem type. + // Must match the metadata type of the dataset used to train the model. + oneof model_metadata { + // Metadata for translation models. + TranslationModelMetadata translation_model_metadata = 15; + } + + // Output only. Resource name of the model. + // Format: `projects/{project_id}/locations/{location_id}/models/{model_id}` + string name = 1; + + // Required. The name of the model to show in the interface. The name can be + // up to 32 characters long and can consist only of ASCII Latin letters A-Z + // and a-z, underscores + // (_), and ASCII digits 0-9. It must start with a letter. 
+ string display_name = 2; + + // Required. The resource ID of the dataset used to create the model. The dataset must + // come from the same ancestor project and location. + string dataset_id = 3; + + // Output only. Timestamp when the model training finished and can be used for prediction. + google.protobuf.Timestamp create_time = 7; + + // Output only. Timestamp when this model was last updated. + google.protobuf.Timestamp update_time = 11; + + // Output only. Deployment state of the model. A model can only serve + // prediction requests after it gets deployed. + DeploymentState deployment_state = 8; + + // Optional. The labels with user-defined metadata to organize your model. + // + // Label keys and values can be no longer than 64 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://goo.gl/xmQnxf for more information on and examples of labels. + map labels = 34; +} diff --git a/automl/google/cloud/automl_v1/proto/model_evaluation.proto b/automl/google/cloud/automl_v1/proto/model_evaluation.proto new file mode 100644 index 000000000000..fe9df1b94887 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/model_evaluation.proto @@ -0,0 +1,62 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.automl.v1; + +import "google/cloud/automl/v1/translation.proto"; +import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl"; +option csharp_namespace = "Google.Cloud.AutoML.V1"; +option java_multiple_files = true; +option java_package = "com.google.cloud.automl.v1"; +option php_namespace = "Google\\Cloud\\AutoML\\V1"; +option ruby_package = "Google::Cloud::AutoML::V1"; + +// Evaluation results of a model. +message ModelEvaluation { + // Output only. Problem type specific evaluation metrics. + oneof metrics { + // Model evaluation metrics for translation. + TranslationEvaluationMetrics translation_evaluation_metrics = 9; + } + + // Output only. Resource name of the model evaluation. + // Format: + // + // `projects/{project_id}/locations/{location_id}/models/{model_id}/modelEvaluations/{model_evaluation_id}` + string name = 1; + + // Output only. The ID of the annotation spec that the model evaluation applies to. The + // The ID is empty for the overall model evaluation. + string annotation_spec_id = 2; + + // Output only. Timestamp when this model evaluation was created. + google.protobuf.Timestamp create_time = 5; + + // Output only. The number of examples used for model evaluation, i.e. for + // which ground truth from time of model creation is compared against the + // predicted annotations created by the model. + // For overall ModelEvaluation (i.e. with annotation_spec_id not set) this is + // the total number of all examples used for evaluation. + // Otherwise, this is the count of examples that according to the ground + // truth were annotated by the + // + // [annotation_spec_id][google.cloud.automl.v1beta1.ModelEvaluation.annotation_spec_id]. 
+ int32 evaluated_example_count = 6; +} diff --git a/automl/google/cloud/automl_v1/proto/model_evaluation_pb2.py b/automl/google/cloud/automl_v1/proto/model_evaluation_pb2.py new file mode 100644 index 000000000000..ec05252d574e --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/model_evaluation_pb2.py @@ -0,0 +1,220 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/model_evaluation.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + translation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_translation__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/model_evaluation.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1P\001Z params = 3; +} + +// Response message for +// [PredictionService.Predict][google.cloud.automl.v1.PredictionService.Predict]. +message PredictResponse { + // Prediction result. + // Translation and Text Sentiment will return precisely one payload. + repeated AnnotationPayload payload = 1; + + // Additional domain-specific prediction response metadata. 
+ map metadata = 2; +} diff --git a/automl/google/cloud/automl_v1/proto/prediction_service_pb2.py b/automl/google/cloud/automl_v1/proto/prediction_service_pb2.py new file mode 100644 index 000000000000..9d438e5f321a --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/prediction_service_pb2.py @@ -0,0 +1,422 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/prediction_service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.cloud.automl_v1.proto import ( + annotation_payload_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_annotation__payload__pb2, +) +from google.cloud.automl_v1.proto import ( + data_items_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_data__items__pb2, +) +from google.cloud.automl_v1.proto import ( + io_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_io__pb2, +) +from google.cloud.automl_v1.proto import ( + operations_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_operations__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/prediction_service.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1B\026PredictionServiceProtoP\001Z The dataset has + // translation_dataset_metadata. + string filter = 3; + + // Requested page size. 
Server may return fewer results than requested. + // If unspecified, server will pick a default size. + int32 page_size = 4; + + // A token identifying a page of results for the server to return + // Typically obtained via + // [ListDatasetsResponse.next_page_token][google.cloud.automl.v1.ListDatasetsResponse.next_page_token] + // of the previous + // [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets] call. + string page_token = 6; +} + +// Response message for +// [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. +message ListDatasetsResponse { + // The datasets read. + repeated Dataset datasets = 1; + + // A token to retrieve next page of results. + // Pass to + // [ListDatasetsRequest.page_token][google.cloud.automl.v1.ListDatasetsRequest.page_token] + // to obtain that page. + string next_page_token = 2; +} + +// Request message for +// [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] +message UpdateDatasetRequest { + // The dataset which replaces the resource on the server. + Dataset dataset = 1; + + // Required. The update mask applies to the resource. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for +// [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. +message DeleteDatasetRequest { + // The resource name of the dataset to delete. + string name = 1; +} + +// Request message for +// [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. +message ImportDataRequest { + // Required. Dataset name. Dataset must already exist. All imported + // annotations and examples will be added. + string name = 1; + + // Required. The desired input location and its domain specific semantics, + // if any. + InputConfig input_config = 3; +} + +// Request message for +// [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. +message ExportDataRequest { + // Required. The resource name of the dataset. + string name = 1; + + // Required. The desired output location. 
+ OutputConfig output_config = 3; +} + +// Request message for +// [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. +message CreateModelRequest { + // Resource name of the parent project where the model is being created. + string parent = 1; + + // The model to create. + Model model = 4; +} + +// Request message for +// [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. +message GetModelRequest { + // Resource name of the model. + string name = 1; +} + +// Request message for +// [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. +message ListModelsRequest { + // Resource name of the project, from which to list the models. + string parent = 1; + + // An expression for filtering the results of the request. + // + // * `model_metadata` - for existence of the case (e.g. + // video_classification_model_metadata:*). + // * `dataset_id` - for = or !=. Some examples of using the filter are: + // + // * `image_classification_model_metadata:*` --> The model has + // image_classification_model_metadata. + // * `dataset_id=5` --> The model was created from a dataset with ID 5. + string filter = 3; + + // Requested page size. + int32 page_size = 4; + + // A token identifying a page of results for the server to return + // Typically obtained via + // [ListModelsResponse.next_page_token][google.cloud.automl.v1.ListModelsResponse.next_page_token] + // of the previous + // [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels] call. + string page_token = 6; +} + +// Response message for +// [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. +message ListModelsResponse { + // List of models in the requested page. + repeated Model model = 1; + + // A token to retrieve next page of results. + // Pass to + // [ListModelsRequest.page_token][google.cloud.automl.v1.ListModelsRequest.page_token] + // to obtain that page. 
+ string next_page_token = 2; +} + +// Request message for +// [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. +message DeleteModelRequest { + // Resource name of the model being deleted. + string name = 1; +} + +// Request message for +// [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] +message UpdateModelRequest { + // The model which replaces the resource on the server. + Model model = 1; + + // Required. The update mask applies to the resource. + google.protobuf.FieldMask update_mask = 2; +} + +// Request message for +// [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. +message GetModelEvaluationRequest { + // Resource name for the model evaluation. + string name = 1; +} + +// Request message for +// [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. +message ListModelEvaluationsRequest { + // Resource name of the model to list the model evaluations for. + // If modelId is set as "-", this will list model evaluations from across all + // models of the parent location. + string parent = 1; + + // An expression for filtering the results of the request. + // + // * `annotation_spec_id` - for =, != or existence. See example below for + // the last. + // + // Some examples of using the filter are: + // + // * `annotation_spec_id!=4` --> The model evaluation was done for + // annotation spec with ID different than 4. + // * `NOT annotation_spec_id:*` --> The model evaluation was done for + // aggregate of all annotation specs. + string filter = 3; + + // Requested page size. + int32 page_size = 4; + + // A token identifying a page of results for the server to return. + // Typically obtained via + // [ListModelEvaluationsResponse.next_page_token][google.cloud.automl.v1.ListModelEvaluationsResponse.next_page_token] + // of the previous + // [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + // call. 
+ string page_token = 6; +} + +// Response message for +// [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. +message ListModelEvaluationsResponse { + // List of model evaluations in the requested page. + repeated ModelEvaluation model_evaluation = 1; + + // A token to retrieve next page of results. + // Pass to the + // [ListModelEvaluationsRequest.page_token][google.cloud.automl.v1.ListModelEvaluationsRequest.page_token] + // field of a new + // [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + // request to obtain that page. + string next_page_token = 2; +} diff --git a/automl/google/cloud/automl_v1/proto/service_pb2.py b/automl/google/cloud/automl_v1/proto/service_pb2.py new file mode 100644 index 000000000000..093dfb1f072b --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/service_pb2.py @@ -0,0 +1,1693 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.cloud.automl_v1.proto import ( + annotation_payload_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_annotation__payload__pb2, +) +from google.cloud.automl_v1.proto import ( + dataset_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2, +) +from google.cloud.automl_v1.proto import ( + io_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_io__pb2, +) +from 
google.cloud.automl_v1.proto import ( + model_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2, +) +from google.cloud.automl_v1.proto import ( + model_evaluation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2, +) +from google.cloud.automl_v1.proto import ( + operations_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_operations__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/service.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1B\013AutoMlProtoP\001Z The dataset has + translation\_dataset\_metadata. + page_size: + Requested page size. Server may return fewer results than + requested. If unspecified, server will pick a default size. + page_token: + A token identifying a page of results for the server to return + Typically obtained via [ListDatasetsResponse.next\_page\_token + ][google.cloud.automl.v1.ListDatasetsResponse.next\_page\_toke + n] of the previous [AutoMl.ListDatasets][google.cloud.automl.v + 1.AutoMl.ListDatasets] call. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListDatasetsRequest) + ), +) +_sym_db.RegisterMessage(ListDatasetsRequest) + +ListDatasetsResponse = _reflection.GeneratedProtocolMessageType( + "ListDatasetsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTDATASETSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListDatasets][google.cloud.automl.v1.AutoMl.ListDatasets]. + + + Attributes: + datasets: + The datasets read. + next_page_token: + A token to retrieve next page of results. 
Pass to [ListDataset + sRequest.page\_token][google.cloud.automl.v1.ListDatasetsReque + st.page\_token] to obtain that page. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListDatasetsResponse) + ), +) +_sym_db.RegisterMessage(ListDatasetsResponse) + +UpdateDatasetRequest = _reflection.GeneratedProtocolMessageType( + "UpdateDatasetRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEDATASETREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.UpdateDataset][google.cloud.automl.v1.AutoMl.UpdateDataset] + + + Attributes: + dataset: + The dataset which replaces the resource on the server. + update_mask: + Required. The update mask applies to the resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.UpdateDatasetRequest) + ), +) +_sym_db.RegisterMessage(UpdateDatasetRequest) + +DeleteDatasetRequest = _reflection.GeneratedProtocolMessageType( + "DeleteDatasetRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEDATASETREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.DeleteDataset][google.cloud.automl.v1.AutoMl.DeleteDataset]. + + + Attributes: + name: + The resource name of the dataset to delete. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.DeleteDatasetRequest) + ), +) +_sym_db.RegisterMessage(DeleteDatasetRequest) + +ImportDataRequest = _reflection.GeneratedProtocolMessageType( + "ImportDataRequest", + (_message.Message,), + dict( + DESCRIPTOR=_IMPORTDATAREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ImportData][google.cloud.automl.v1.AutoMl.ImportData]. + + + Attributes: + name: + Required. Dataset name. Dataset must already exist. All + imported annotations and examples will be added. + input_config: + Required. The desired input location and its domain specific + semantics, if any. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ImportDataRequest) + ), +) +_sym_db.RegisterMessage(ImportDataRequest) + +ExportDataRequest = _reflection.GeneratedProtocolMessageType( + "ExportDataRequest", + (_message.Message,), + dict( + DESCRIPTOR=_EXPORTDATAREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ExportData][google.cloud.automl.v1.AutoMl.ExportData]. + + + Attributes: + name: + Required. The resource name of the dataset. + output_config: + Required. The desired output location. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ExportDataRequest) + ), +) +_sym_db.RegisterMessage(ExportDataRequest) + +CreateModelRequest = _reflection.GeneratedProtocolMessageType( + "CreateModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.CreateModel][google.cloud.automl.v1.AutoMl.CreateModel]. + + + Attributes: + parent: + Resource name of the parent project where the model is being + created. + model: + The model to create. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.CreateModelRequest) + ), +) +_sym_db.RegisterMessage(CreateModelRequest) + +GetModelRequest = _reflection.GeneratedProtocolMessageType( + "GetModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.GetModel][google.cloud.automl.v1.AutoMl.GetModel]. + + + Attributes: + name: + Resource name of the model. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GetModelRequest) + ), +) +_sym_db.RegisterMessage(GetModelRequest) + +ListModelsRequest = _reflection.GeneratedProtocolMessageType( + "ListModelsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELSREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. + + + Attributes: + parent: + Resource name of the project, from which to list the models. + filter: + An expression for filtering the results of the request. - + ``model_metadata`` - for existence of the case (e.g. + video\_classification\_model\_metadata:\*). - ``dataset_id`` + - for = or !=. Some examples of using the filter are: - + ``image_classification_model_metadata:*`` --> The model has + image\_classification\_model\_metadata. - ``dataset_id=5`` + --> The model was created from a dataset with ID 5. + page_size: + Requested page size. + page_token: + A token identifying a page of results for the server to return + Typically obtained via [ListModelsResponse.next\_page\_token][ + google.cloud.automl.v1.ListModelsResponse.next\_page\_token] + of the previous + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels] + call. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelsRequest) + ), +) +_sym_db.RegisterMessage(ListModelsRequest) + +ListModelsResponse = _reflection.GeneratedProtocolMessageType( + "ListModelsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListModels][google.cloud.automl.v1.AutoMl.ListModels]. + + + Attributes: + model: + List of models in the requested page. + next_page_token: + A token to retrieve next page of results. Pass to [ListModelsR + equest.page\_token][google.cloud.automl.v1.ListModelsRequest.p + age\_token] to obtain that page. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelsResponse) + ), +) +_sym_db.RegisterMessage(ListModelsResponse) + +DeleteModelRequest = _reflection.GeneratedProtocolMessageType( + "DeleteModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.DeleteModel][google.cloud.automl.v1.AutoMl.DeleteModel]. + + + Attributes: + name: + Resource name of the model being deleted. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.DeleteModelRequest) + ), +) +_sym_db.RegisterMessage(DeleteModelRequest) + +UpdateModelRequest = _reflection.GeneratedProtocolMessageType( + "UpdateModelRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATEMODELREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.UpdateModel][google.cloud.automl.v1.AutoMl.UpdateModel] + + + Attributes: + model: + The model which replaces the resource on the server. + update_mask: + Required. The update mask applies to the resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.UpdateModelRequest) + ), +) +_sym_db.RegisterMessage(UpdateModelRequest) + +GetModelEvaluationRequest = _reflection.GeneratedProtocolMessageType( + "GetModelEvaluationRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETMODELEVALUATIONREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.GetModelEvaluation][google.cloud.automl.v1.AutoMl.GetModelEvaluation]. + + + Attributes: + name: + Resource name for the model evaluation. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.GetModelEvaluationRequest) + ), +) +_sym_db.RegisterMessage(GetModelEvaluationRequest) + +ListModelEvaluationsRequest = _reflection.GeneratedProtocolMessageType( + "ListModelEvaluationsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELEVALUATIONSREQUEST, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Request message for + [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. + + + Attributes: + parent: + Resource name of the model to list the model evaluations for. + If modelId is set as "-", this will list model evaluations + from across all models of the parent location. + filter: + An expression for filtering the results of the request. - + ``annotation_spec_id`` - for =, != or existence. See example + below for the last. Some examples of using the filter are: + - ``annotation_spec_id!=4`` --> The model evaluation was done + for annotation spec with ID different than 4. - ``NOT + annotation_spec_id:*`` --> The model evaluation was done for + aggregate of all annotation specs. + page_size: + Requested page size. + page_token: + A token identifying a page of results for the server to + return. Typically obtained via [ListModelEvaluationsResponse.n + ext\_page\_token][google.cloud.automl.v1.ListModelEvaluationsR + esponse.next\_page\_token] of the previous [AutoMl.ListModelEv + aluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations] + call. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelEvaluationsRequest) + ), +) +_sym_db.RegisterMessage(ListModelEvaluationsRequest) + +ListModelEvaluationsResponse = _reflection.GeneratedProtocolMessageType( + "ListModelEvaluationsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTMODELEVALUATIONSRESPONSE, + __module__="google.cloud.automl_v1.proto.service_pb2", + __doc__="""Response message for + [AutoMl.ListModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEvaluations]. + + + Attributes: + model_evaluation: + List of model evaluations in the requested page. + next_page_token: + A token to retrieve next page of results. Pass to the [ListMod + elEvaluationsRequest.page\_token][google.cloud.automl.v1.ListM + odelEvaluationsRequest.page\_token] field of a new [AutoMl.Lis + tModelEvaluations][google.cloud.automl.v1.AutoMl.ListModelEval + uations] request to obtain that page. + """, + # @@protoc_insertion_point(class_scope:google.cloud.automl.v1.ListModelEvaluationsResponse) + ), +) +_sym_db.RegisterMessage(ListModelEvaluationsResponse) + + +DESCRIPTOR._options = None + +_AUTOML = _descriptor.ServiceDescriptor( + name="AutoMl", + full_name="google.cloud.automl.v1.AutoMl", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\025automl.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1871, + serialized_end=4179, + methods=[ + _descriptor.MethodDescriptor( + name="CreateDataset", + full_name="google.cloud.automl.v1.AutoMl.CreateDataset", + index=0, + containing_service=None, + input_type=_CREATEDATASETREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\0027",/v1/{parent=projects/*/locations/*}/datasets:\007dataset' + ), + ), + _descriptor.MethodDescriptor( + name="GetDataset", + full_name="google.cloud.automl.v1.AutoMl.GetDataset", + index=1, + containing_service=None, + input_type=_GETDATASETREQUEST, + 
output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2._DATASET, + serialized_options=_b( + "\202\323\344\223\002.\022,/v1/{name=projects/*/locations/*/datasets/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListDatasets", + full_name="google.cloud.automl.v1.AutoMl.ListDatasets", + index=2, + containing_service=None, + input_type=_LISTDATASETSREQUEST, + output_type=_LISTDATASETSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002.\022,/v1/{parent=projects/*/locations/*}/datasets" + ), + ), + _descriptor.MethodDescriptor( + name="UpdateDataset", + full_name="google.cloud.automl.v1.AutoMl.UpdateDataset", + index=3, + containing_service=None, + input_type=_UPDATEDATASETREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2._DATASET, + serialized_options=_b( + "\202\323\344\223\002?24/v1/{dataset.name=projects/*/locations/*/datasets/*}:\007dataset" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteDataset", + full_name="google.cloud.automl.v1.AutoMl.DeleteDataset", + index=4, + containing_service=None, + input_type=_DELETEDATASETREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + "\202\323\344\223\002.*,/v1/{name=projects/*/locations/*/datasets/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ImportData", + full_name="google.cloud.automl.v1.AutoMl.ImportData", + index=5, + containing_service=None, + input_type=_IMPORTDATAREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002<"7/v1/{name=projects/*/locations/*/datasets/*}:importData:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="ExportData", + full_name="google.cloud.automl.v1.AutoMl.ExportData", + index=6, + containing_service=None, + input_type=_EXPORTDATAREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + 
'\202\323\344\223\002<"7/v1/{name=projects/*/locations/*/datasets/*}:exportData:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="CreateModel", + full_name="google.cloud.automl.v1.AutoMl.CreateModel", + index=7, + containing_service=None, + input_type=_CREATEMODELREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\0023"*/v1/{parent=projects/*/locations/*}/models:\005model' + ), + ), + _descriptor.MethodDescriptor( + name="GetModel", + full_name="google.cloud.automl.v1.AutoMl.GetModel", + index=8, + containing_service=None, + input_type=_GETMODELREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2._MODEL, + serialized_options=_b( + "\202\323\344\223\002,\022*/v1/{name=projects/*/locations/*/models/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListModels", + full_name="google.cloud.automl.v1.AutoMl.ListModels", + index=9, + containing_service=None, + input_type=_LISTMODELSREQUEST, + output_type=_LISTMODELSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002,\022*/v1/{parent=projects/*/locations/*}/models" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteModel", + full_name="google.cloud.automl.v1.AutoMl.DeleteModel", + index=10, + containing_service=None, + input_type=_DELETEMODELREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + "\202\323\344\223\002,**/v1/{name=projects/*/locations/*/models/*}" + ), + ), + _descriptor.MethodDescriptor( + name="UpdateModel", + full_name="google.cloud.automl.v1.AutoMl.UpdateModel", + index=11, + containing_service=None, + input_type=_UPDATEMODELREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2._MODEL, + serialized_options=_b( + "\202\323\344\223\002920/v1/{model.name=projects/*/locations/*/models/*}:\005model" + ), + ), + _descriptor.MethodDescriptor( + name="GetModelEvaluation", + 
full_name="google.cloud.automl.v1.AutoMl.GetModelEvaluation", + index=12, + containing_service=None, + input_type=_GETMODELEVALUATIONREQUEST, + output_type=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2._MODELEVALUATION, + serialized_options=_b( + "\202\323\344\223\002?\022=/v1/{name=projects/*/locations/*/models/*/modelEvaluations/*}" + ), + ), + _descriptor.MethodDescriptor( + name="ListModelEvaluations", + full_name="google.cloud.automl.v1.AutoMl.ListModelEvaluations", + index=13, + containing_service=None, + input_type=_LISTMODELEVALUATIONSREQUEST, + output_type=_LISTMODELEVALUATIONSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002?\022=/v1/{parent=projects/*/locations/*/models/*}/modelEvaluations" + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_AUTOML) + +DESCRIPTOR.services_by_name["AutoMl"] = _AUTOML + +# @@protoc_insertion_point(module_scope) diff --git a/automl/google/cloud/automl_v1/proto/service_pb2_grpc.py b/automl/google/cloud/automl_v1/proto/service_pb2_grpc.py new file mode 100644 index 000000000000..dd6beb5ca397 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/service_pb2_grpc.py @@ -0,0 +1,322 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +from google.cloud.automl_v1.proto import ( + dataset_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2, +) +from google.cloud.automl_v1.proto import ( + model_evaluation_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2, +) +from google.cloud.automl_v1.proto import ( + model_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2, +) +from google.cloud.automl_v1.proto import ( + service_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +class AutoMlStub(object): + """AutoML Server API. + + The resource names are assigned by the server. 
+ The server never reuses names that it has created after the resources with + those names are deleted. + + An ID of a resource is the last element of the item's resource name. For + `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`, then + the id for the item is `{dataset_id}`. + + Currently the only supported `location_id` is "us-central1". + + On any input that is documented to expect a string parameter in + snake_case or kebab-case, either of those cases is accepted. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/CreateDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateDatasetRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetDatasetRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.FromString, + ) + self.ListDatasets = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListDatasets", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsResponse.FromString, + ) + self.UpdateDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/UpdateDataset", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateDatasetRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.FromString, + ) + self.DeleteDataset = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/DeleteDataset", + 
request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteDatasetRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ImportData = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ImportData", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ImportDataRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ExportData = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ExportData", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ExportDataRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.CreateModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/CreateModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateModelRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.GetModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.FromString, + ) + self.ListModels = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListModels", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsResponse.FromString, + ) + self.DeleteModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/DeleteModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteModelRequest.SerializeToString, + 
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.UpdateModel = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/UpdateModel", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateModelRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.FromString, + ) + self.GetModelEvaluation = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/GetModelEvaluation", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelEvaluationRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2.ModelEvaluation.FromString, + ) + self.ListModelEvaluations = channel.unary_unary( + "/google.cloud.automl.v1.AutoMl/ListModelEvaluations", + request_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsResponse.FromString, + ) + + +class AutoMlServicer(object): + """AutoML Server API. + + The resource names are assigned by the server. + The server never reuses names that it has created after the resources with + those names are deleted. + + An ID of a resource is the last element of the item's resource name. For + `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}`, then + the id for the item is `{dataset_id}`. + + Currently the only supported `location_id` is "us-central1". + + On any input that is documented to expect a string parameter in + snake_case or kebab-case, either of those cases is accepted. + """ + + def CreateDataset(self, request, context): + """Creates a dataset. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetDataset(self, request, context): + """Gets a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListDatasets(self, request, context): + """Lists datasets in a project. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateDataset(self, request, context): + """Updates a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteDataset(self, request, context): + """Deletes a dataset and all of its contents. + Returns empty response in the + [response][google.longrunning.Operation.response] field when it completes, + and `delete_details` in the + [metadata][google.longrunning.Operation.metadata] field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ImportData(self, request, context): + """Imports data into a dataset. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ExportData(self, request, context): + """Exports dataset's data to the provided output location. + Returns an empty response in the + [response][google.longrunning.Operation.response] field when it completes. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateModel(self, request, context): + """Creates a model. 
+ Returns a Model in the [response][google.longrunning.Operation.response] + field when it completes. + When you create a model, several model evaluations are created for it: + a global evaluation, and one evaluation for each annotation spec. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetModel(self, request, context): + """Gets a model. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListModels(self, request, context): + """Lists models. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteModel(self, request, context): + """Deletes a model. + Returns `google.protobuf.Empty` in the + [response][google.longrunning.Operation.response] field when it completes, + and `delete_details` in the + [metadata][google.longrunning.Operation.metadata] field. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def UpdateModel(self, request, context): + """Updates a model. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetModelEvaluation(self, request, context): + """Gets a model evaluation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListModelEvaluations(self, request, context): + """Lists model evaluations. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_AutoMlServicer_to_server(servicer, server): + rpc_method_handlers = { + "CreateDataset": grpc.unary_unary_rpc_method_handler( + servicer.CreateDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateDatasetRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetDataset": grpc.unary_unary_rpc_method_handler( + servicer.GetDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetDatasetRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.SerializeToString, + ), + "ListDatasets": grpc.unary_unary_rpc_method_handler( + servicer.ListDatasets, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListDatasetsResponse.SerializeToString, + ), + "UpdateDataset": grpc.unary_unary_rpc_method_handler( + servicer.UpdateDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateDatasetRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_dataset__pb2.Dataset.SerializeToString, + ), + "DeleteDataset": grpc.unary_unary_rpc_method_handler( + servicer.DeleteDataset, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteDatasetRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ImportData": grpc.unary_unary_rpc_method_handler( + servicer.ImportData, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ImportDataRequest.FromString, + 
response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ExportData": grpc.unary_unary_rpc_method_handler( + servicer.ExportData, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ExportDataRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "CreateModel": grpc.unary_unary_rpc_method_handler( + servicer.CreateModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.CreateModelRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "GetModel": grpc.unary_unary_rpc_method_handler( + servicer.GetModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.SerializeToString, + ), + "ListModels": grpc.unary_unary_rpc_method_handler( + servicer.ListModels, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelsResponse.SerializeToString, + ), + "DeleteModel": grpc.unary_unary_rpc_method_handler( + servicer.DeleteModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.DeleteModelRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "UpdateModel": grpc.unary_unary_rpc_method_handler( + servicer.UpdateModel, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.UpdateModelRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__pb2.Model.SerializeToString, + ), + "GetModelEvaluation": grpc.unary_unary_rpc_method_handler( + servicer.GetModelEvaluation, + 
request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.GetModelEvaluationRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_model__evaluation__pb2.ModelEvaluation.SerializeToString, + ), + "ListModelEvaluations": grpc.unary_unary_rpc_method_handler( + servicer.ListModelEvaluations, + request_deserializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsRequest.FromString, + response_serializer=google_dot_cloud_dot_automl__v1_dot_proto_dot_service__pb2.ListModelEvaluationsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.cloud.automl.v1.AutoMl", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/automl/google/cloud/automl_v1/proto/translation.proto b/automl/google/cloud/automl_v1/proto/translation.proto new file mode 100644 index 000000000000..bc449fe79f58 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/translation.proto @@ -0,0 +1,70 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+//
+
+syntax = "proto3";
+
+package google.cloud.automl.v1;
+
+import "google/cloud/automl/v1/data_items.proto";
+import "google/api/annotations.proto";
+
+option go_package = "google.golang.org/genproto/googleapis/cloud/automl/v1;automl";
+option csharp_namespace = "Google.Cloud.AutoML.V1";
+option java_multiple_files = true;
+option java_outer_classname = "TranslationProto";
+option java_package = "com.google.cloud.automl.v1";
+option php_namespace = "Google\\Cloud\\AutoML\\V1";
+option ruby_package = "Google::Cloud::AutoML::V1";
+
+// Dataset metadata that is specific to translation.
+message TranslationDatasetMetadata {
+  // Required. The BCP-47 language code of the source language.
+  string source_language_code = 1;
+
+  // Required. The BCP-47 language code of the target language.
+  string target_language_code = 2;
+}
+
+// Evaluation metrics for the dataset.
+message TranslationEvaluationMetrics {
+  // Output only. BLEU score.
+  double bleu_score = 1;
+
+  // Output only. BLEU score for base model.
+  double base_bleu_score = 2;
+}
+
+// Model metadata that is specific to translation.
+message TranslationModelMetadata {
+  // The resource name of the model to use as a baseline to train the custom
+  // model. If unset, we use the default base model provided by Google
+  // Translate. Format:
+  // `projects/{project_id}/locations/{location_id}/models/{model_id}`
+  string base_model = 1;
+
+  // Output only. Inferred from the dataset.
+  // The source language (the BCP-47 language code) that is used for training.
+  string source_language_code = 2;
+
+  // Output only. The target language (the BCP-47 language code) that is used for
+  // training.
+  string target_language_code = 3;
+}
+
+// Annotation details specific to translation.
+message TranslationAnnotation {
+  // Output only. The translated content.
+ TextSnippet translated_content = 1; +} diff --git a/automl/google/cloud/automl_v1/proto/translation_pb2.py b/automl/google/cloud/automl_v1/proto/translation_pb2.py new file mode 100644 index 000000000000..4542dbc539d5 --- /dev/null +++ b/automl/google/cloud/automl_v1/proto/translation_pb2.py @@ -0,0 +1,370 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/automl_v1/proto/translation.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.cloud.automl_v1.proto import ( + data_items_pb2 as google_dot_cloud_dot_automl__v1_dot_proto_dot_data__items__pb2, +) +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/automl_v1/proto/translation.proto", + package="google.cloud.automl.v1", + syntax="proto3", + serialized_options=_b( + "\n\032com.google.cloud.automl.v1B\020TranslationProtoP\001Z>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. 
+ region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): This is the fully-qualified name generated by the AutoML API for this dataset. This is not to be confused with the human-assigned `dataset_display_name` that is provided when creating a dataset. Either `dataset_name` or `dataset_display_name` must be provided. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): This is the name you provided for the dataset when first creating it. Either `dataset_name` or `dataset_display_name` must be provided. @@ -550,15 +557,15 @@ def create_dataset( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that will own the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (string): + dataset_display_name (str): A human-readable name to refer to this dataset by. Returns: @@ -604,19 +611,19 @@ def delete_dataset( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to delete. This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to delete. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -626,8 +633,9 @@ def delete_dataset( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -679,7 +687,7 @@ def import_data( ... >>> d = client.create_dataset(dataset_display_name='my_dataset') >>> - >>> client.import_data(dataset=d, + >>> response = client.import_data(dataset=d, ... gcs_input_uris='gs://cloud-ml-tables-data/bank-marketing.csv') ... >>> def callback(operation_future): @@ -689,12 +697,12 @@ def import_data( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. 
+ Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. credentials (Optional[google.auth.credentials.Credentials]): The @@ -702,11 +710,11 @@ def import_data( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to import data into. This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to import data into. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -720,19 +728,20 @@ def import_data( `gs://{project}-automl-tables-staging/{uploaded_csv_name}` This parameter must be supplied if neither `gcs_input_uris` nor `bigquery_input_uri` is supplied. - gcs_input_uris (Optional[Union[string, Sequence[string]]]): + gcs_input_uris (Optional[Union[str, Sequence[str]]]): Either a single `gs://..` prefixed URI, or a list of URIs referring to GCS-hosted CSV files containing the data to import. This must be supplied if neither `bigquery_input_uri` nor `pandas_dataframe` is supplied. - bigquery_input_uri (Optional[string]): + bigquery_input_uri (Optional[str]): A URI pointing to the BigQuery table containing the data to import. This must be supplied if neither `gcs_input_uris` nor `pandas_dataframe` is supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -753,7 +762,10 @@ def import_data( request = {} if pandas_dataframe is not None: - self.__ensure_gcs_client_is_initialized(credentials) + project = project or self.project + region = region or self.region + credentials = credentials or self.credentials + self.__ensure_gcs_client_is_initialized(credentials, project) self.gcs_client.ensure_bucket_exists(project, region) gcs_input_uri = self.gcs_client.upload_pandas_dataframe(pandas_dataframe) request = {"gcs_source": {"input_uris": [gcs_input_uri]}} @@ -796,7 +808,7 @@ def export_data( ... >>> d = client.create_dataset(dataset_display_name='my_dataset') >>> - >>> client.export_data(dataset=d, + >>> response = client.export_data(dataset=d, ... gcs_output_uri_prefix='gs://cloud-ml-tables-data/bank-marketing.csv') ... >>> def callback(operation_future): @@ -806,19 +818,19 @@ def export_data( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to export data from. This must be supplied if `dataset` or `dataset_name` are not supplied. 
- dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to export data from. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -826,16 +838,17 @@ def export_data( The `Dataset` instance you want to export data from. This must be supplied if `dataset_display_name` or `dataset_name` are not supplied. - gcs_output_uri_prefix (Optional[Union[string, Sequence[string]]]): + gcs_output_uri_prefix (Optional[Union[str, Sequence[str]]]): A single `gs://..` prefixed URI to export to. This must be supplied if `bigquery_output_uri` is not. - bigquery_output_uri (Optional[string]): + bigquery_output_uri (Optional[str]): A URI pointing to the BigQuery table containing the data to export. This must be supplied if `gcs_output_uri_prefix` is not. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -883,15 +896,15 @@ def get_table_spec(self, table_spec_name, project=None, region=None, **kwargs): >>> Args: - table_spec_name (string): + table_spec_name (str): This is the fully-qualified name generated by the AutoML API for this table spec. - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. 
+ region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -933,19 +946,19 @@ def list_table_specs( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + dataset. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to read specs from. This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to read specs from. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -995,15 +1008,15 @@ def get_column_spec(self, column_spec_name, project=None, region=None, **kwargs) >>> Args: - column_spec_name (string): + column_spec_name (str): This is the fully-qualified name generated by the AutoML API for this column spec. - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + column. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -1047,29 +1060,29 @@ def list_column_specs( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + columns. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose specs you want to read. If not supplied, the client can determine this name from a source `Dataset` object. table_spec_index (Optional[int]): If no `table_spec_name` was provided, we use this index to determine which table to read column specs from. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to read specs from. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to read specs from. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. 
- dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to read specs from. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of @@ -1145,50 +1158,57 @@ def update_column_spec( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): - If you have initialized the client with a value for `region` it - will be used if this parameter is not supplied. - column_spec_name (Optional[string]): - The name AutoML-assigned name for the column you want to - update. - column_spec_display_name (Optional[string]): - The human-readable name of the column you want to update. If - this is supplied in place of `column_spec_name`, you also need - to provide either a way to lookup the source dataset (using one - of the `dataset*` kwargs), or the `table_spec_name` of the - table this column belongs to. - table_spec_name (Optional[string]): - The AutoML-assigned name for the table whose specs you want to - update. If not supplied, the client can determine this name - from a source `Dataset` object. - table_spec_index (Optional[int]): - If no `table_spec_name` was provided, we use this index to - determine which table to update column specs on. - dataset_display_name (Optional[string]): + dataset (Optional[Dataset]): + The `Dataset` instance you want to update specs on. If no + `table_spec_name` is supplied, this will be used together with + `table_spec_index` to infer the name of table to update specs + on. This must be supplied if `table_spec_name`, `dataset_name` + or `dataset_display_name` are not supplied. + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update specs on. 
If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update specs on. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update specs one. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update specs on. This must be supplied if `table_spec_name`, `dataset` or `dataset_display_name` are not supplied. - dataset (Optional[Dataset]): - The `Dataset` instance you want to update specs on. If no - `table_spec_name` is supplied, this will be used together with - `table_spec_index` to infer the name of table to update specs - on. This must be supplied if `table_spec_name`, `dataset_name` - or `dataset_display_name` are not supplied. + table_spec_name (Optional[str]): + The AutoML-assigned name for the table whose specs you want to + update. If not supplied, the client can determine this name + from a source `Dataset` object. + table_spec_index (Optional[int]): + If no `table_spec_name` was provided, we use this index to + determine which table to update column specs on. + column_spec_name (Optional[str]): + The name AutoML-assigned name for the column you want to + update. + column_spec_display_name (Optional[str]): + The human-readable name of the column you want to update. If + this is supplied in place of `column_spec_name`, you also need + to provide either a way to lookup the source dataset (using one + of the `dataset*` kwargs), or the `table_spec_name` of the + table this column belongs to. + type_code (Optional[str]): + The desired 'type_code' of the column. 
For more information + on the available types, please see the documentation: + https://cloud.google.com/automl-tables/docs/reference/rpc/google.cloud.automl.v1beta1#typecode + nullable (Optional[bool]): + Set to `True` or `False` to specify if this column's value + must be expected to be present in all rows or not. + project (Optional[str]): The ID of the project that owns the + columns. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): + If you have initialized the client with a value for `region` it + will be used if this parameter is not supplied. Returns: A :class:`~google.cloud.automl_v1beta1.types.ColumnSpec` instance. @@ -1270,24 +1290,24 @@ def set_target_column( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the target column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the target column. 
If this is supplied in place of `column_spec_name`, you also need to provide either a way to lookup the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose target column you want to set . If not supplied, the client can determine this name from a source `Dataset` object. @@ -1295,14 +1315,14 @@ def set_target_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the target column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the target column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the target column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the target column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1383,28 +1403,28 @@ def set_time_column( ... project='my-project', region='us-central1') ... >>> client.set_time_column(dataset_display_name='my_dataset', - ... column_spec_name='Unix Time') + ... column_spec_display_name='Unix Time') ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. 
+ Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the time column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the time column. If this is supplied in place of `column_spec_name`, you also need to provide either a way to lookup the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose time column you want to set . If not supplied, the client can determine this name from a source `Dataset` object. @@ -1412,14 +1432,14 @@ def set_time_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the time column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the time column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1495,26 +1515,26 @@ def clear_time_column( ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json') ... 
project='my-project', region='us-central1') ... - >>> client.set_time_column(dataset_display_name='my_dataset') + >>> client.clear_time_column(dataset_display_name='my_dataset') >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the time column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the time column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1585,24 +1605,24 @@ def set_weight_column( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the weight column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the weight column. If this is supplied in place of `column_spec_name`, you also need to provide either a way to lookup the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose weight column you want to set . If not supplied, the client can determine this name from a source `Dataset` object. @@ -1610,14 +1630,14 @@ def set_weight_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the weight column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the weight column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the weight column of. 
If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1697,22 +1717,22 @@ def clear_weight_column( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the weight column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the weight column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1782,24 +1802,24 @@ def set_test_train_column( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - column_spec_name (Optional[string]): + column_spec_name (Optional[str]): The name AutoML-assigned name for the column you want to set as the test/train column. - column_spec_display_name (Optional[string]): + column_spec_display_name (Optional[str]): The human-readable name of the column you want to set as the test/train column. If this is supplied in place of `column_spec_name`, you also need to provide either a way to lookup the source dataset (using one of the `dataset*` kwargs), or the `table_spec_name` of the table this column belongs to. - table_spec_name (Optional[string]): + table_spec_name (Optional[str]): The AutoML-assigned name for the table whose test/train column you want to set . If not supplied, the client can determine this name from a source `Dataset` object. @@ -1807,14 +1827,14 @@ def set_test_train_column( If no `table_spec_name` or `column_spec_name` was provided, we use this index to determine which table to set the test/train column on. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the test/train column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the test/train column of. 
If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1895,22 +1915,22 @@ def clear_test_train_column( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + table. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to infer the name of table to update the test/train column of. This must be supplied if `table_spec_name`, `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to update the test/train column of. If no `table_spec_name` is supplied, this will be used together with `table_spec_index` to @@ -1970,12 +1990,12 @@ def list_models(self, project=None, region=None, **kwargs): ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + models. 
If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -2024,19 +2044,19 @@ def list_model_evaluations( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to list evaluations for. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to list evaluations for. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2052,6 +2072,12 @@ def list_model_evaluations( instances. You can also iterate over the pages of the response using its `pages` property. + For a regression model, there will only be one evaluation. For a + classification model there will be on for each classification + label, as well as one for micro-averaged metrics. 
See more + documentation here: + https://cloud.google.com/automl-tables/docs/evaluate#automl-tables-list-model-evaluations-cli-curl:w + Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. @@ -2096,33 +2122,37 @@ def create_model( ... credentials=service_account.Credentials.from_service_account_file('~/.gcp/account.json') ... project='my-project', region='us-central1') ... - >>> m = client.create_model('my_model', dataset_display_name='my_dataset') + >>> m = client.create_model( + ... 'my_model', + ... dataset_display_name='my_dataset', + ... train_budget_milli_node_hours=1000 + ... ) >>> >>> m.result() # blocks on result >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that will own the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (string): + model_display_name (str): A human-readable name to refer to this model by. train_budget_milli_node_hours (int): The amount of time (in thousandths of an hour) to spend training. This value must be between 1,000 and 72,000 inclusive (between 1 and 72 hours). - optimization_objective (string): + optimization_objective (str): The metric AutoML tables should optimize for. - dataset_display_name (Optional[string]): + dataset_display_name (Optional[str]): The human-readable name given to the dataset you want to train your model on. 
This must be supplied if `dataset` or `dataset_name` are not supplied. - dataset_name (Optional[string]): + dataset_name (Optional[str]): The AutoML-assigned name given to the dataset you want to train your model on. This must be supplied if `dataset_display_name` or `dataset` are not supplied. @@ -2132,15 +2162,17 @@ def create_model( are not supplied. model_metadata (Optional[Dict]): Optional model metadata to supply to the client. - include_column_spec_names(Optional[string]): + include_column_spec_names(Optional[str]): The list of the names of the columns you want to include to train your model on. - exclude_column_spec_names(Optional[string]): + exclude_column_spec_names(Optional[str]): The list of the names of the columns you want to exclude and not train your model on. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. + Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. @@ -2245,19 +2277,19 @@ def delete_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. 
- model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to delete. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to delete. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2267,8 +2299,9 @@ def delete_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2312,15 +2345,15 @@ def get_model_evaluation( >>> Args: - model_evaluation_name (string): + model_evaluation_name (str): This is the fully-qualified name generated by the AutoML API for this model evaluation. - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. @@ -2359,21 +2392,21 @@ def get_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. 
- region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_name (Optional[string]): + model_name (Optional[str]): This is the fully-qualified name generated by the AutoML API for this model. This is not to be confused with the human-assigned `model_display_name` that is provided when creating a model. Either `model_name` or `model_display_name` must be provided. - model_display_name (Optional[string]): + model_display_name (Optional[str]): This is the name you provided for the model when first creating it. Either `model_name` or `model_display_name` must be provided. @@ -2428,19 +2461,19 @@ def deploy_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to deploy. This must be supplied if `model` or `model_name` are not supplied. 
- model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to deploy. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2450,8 +2483,9 @@ def deploy_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2499,19 +2533,19 @@ def undeploy_model( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to undeploy. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to undeploy. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2521,8 +2555,9 @@ def undeploy_model( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. + google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2574,22 +2609,22 @@ def predict( >>> Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. + Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. - inputs (Union[List[string], Dict[string, string]]): + inputs (Union[List[str], Dict[str, str]]): Either the sorted list of column values to predict with, or a key-value map of column display name to value to predict with. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to predict with. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to predict with. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2676,12 +2711,12 @@ def batch_predict( ... Args: - project (Optional[string]): - If you have initialized the client with a value for `project` - it will be used if this parameter is not supplied. Keep in - mind, the service account this client was initialized with must - have access to this project. - region (Optional[string]): + project (Optional[str]): The ID of the project that owns the + model. If you have initialized the client with a value for + `project` it will be used if this parameter is not supplied. 
+ Keep in mind, the service account this client was initialized + with must have access to this project. + region (Optional[str]): If you have initialized the client with a value for `region` it will be used if this parameter is not supplied. credentials (Optional[google.auth.credentials.Credentials]): The @@ -2695,24 +2730,24 @@ def batch_predict( staged to GCS in `gs://{project}-automl-tables-staging/{uploaded_csv_name}` This must be supplied if neither `gcs_input_uris` nor `bigquery_input_uri` is supplied. - gcs_input_uris (Optional(Union[List[string], string])) + gcs_input_uris (Optional(Union[List[str], str])) Either a list of or a single GCS URI containing the data you want to predict off of. This must be supplied if neither `pandas_dataframe` nor `bigquery_input_uri` is supplied. - gcs_output_uri_prefix (Optional[string]) + gcs_output_uri_prefix (Optional[str]) The folder in GCS you want to write output to. This must be supplied if `bigquery_output_uri` is not. - bigquery_input_uri (Optional[string]) + bigquery_input_uri (Optional[str]) The BigQuery table to input data from. This must be supplied if neither `pandas_dataframe` nor `gcs_input_uris` is supplied. - bigquery_output_uri (Optional[string]) + bigquery_output_uri (Optional[str]) The BigQuery table to output data to. This must be supplied if `gcs_output_uri_prefix` is not. - model_display_name (Optional[string]): + model_display_name (Optional[str]): The human-readable name given to the model you want to predict with. This must be supplied if `model` or `model_name` are not supplied. - model_name (Optional[string]): + model_name (Optional[str]): The AutoML-assigned name given to the model you want to predict with. This must be supplied if `model_display_name` or `model` are not supplied. @@ -2722,8 +2757,9 @@ def batch_predict( supplied. Returns: - A :class:`~google.cloud.automl_v1beta1.types._OperationFuture` - instance. 
+ google.api_core.operation.Operation: + An operation future that can be used to check for + completion synchronously or asynchronously. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -2744,7 +2780,10 @@ def batch_predict( input_request = None if pandas_dataframe is not None: - self.__ensure_gcs_client_is_initialized(credentials) + project = project or self.project + region = region or self.region + credentials = credentials or self.credentials + self.__ensure_gcs_client_is_initialized(credentials, project) self.gcs_client.ensure_bucket_exists(project, region) gcs_input_uri = self.gcs_client.upload_pandas_dataframe(pandas_dataframe) input_request = {"gcs_source": {"input_uris": [gcs_input_uri]}} diff --git a/automl/setup.py b/automl/setup.py index c76f945594e8..a810e86ef966 100644 --- a/automl/setup.py +++ b/automl/setup.py @@ -19,7 +19,7 @@ name = "google-cloud-automl" description = "Cloud AutoML API client library" -version = "0.5.0" +version = "0.7.1" release_status = "Development Status :: 3 - Alpha" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", diff --git a/automl/synth.metadata b/automl/synth.metadata index 026f39141b56..641ff4cd295e 100644 --- a/automl/synth.metadata +++ b/automl/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-25T12:11:40.675705Z", + "updateTime": "2019-10-08T12:12:09.104671Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.1", - "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "9dc1d37b6b9e9581c8ab56c6b2d3b49ff3eeb254", - "internalRef": "271101725" + "sha": "122bdbf877ad87439f8dd9d1474a8e5dde188087", + "internalRef": "273381131" } }, { @@ -34,6 +34,16 @@ "generator": "gapic", 
"config": "google/cloud/automl/artman_automl_v1beta1.yaml" } + }, + { + "client": { + "source": "googleapis", + "apiName": "automl", + "apiVersion": "v1", + "language": "python", + "generator": "gapic", + "config": "google/cloud/automl/artman_automl_v1.yaml" + } } ] } \ No newline at end of file diff --git a/automl/synth.py b/automl/synth.py index 6176f5b3b1e1..937bb0abfa5d 100644 --- a/automl/synth.py +++ b/automl/synth.py @@ -21,7 +21,7 @@ gapic = gcp.GAPICGenerator() common = gcp.CommonTemplates() -versions = ["v1beta1"] +versions = ["v1beta1", "v1"] # ---------------------------------------------------------------------------- diff --git a/automl/tests/unit/gapic/v1/test_auto_ml_client_v1.py b/automl/tests/unit/gapic/v1/test_auto_ml_client_v1.py new file mode 100644 index 000000000000..cdf4555f1969 --- /dev/null +++ b/automl/tests/unit/gapic/v1/test_auto_ml_client_v1.py @@ -0,0 +1,780 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit tests.""" + +import mock +import pytest + +from google.rpc import status_pb2 + +from google.cloud import automl_v1 +from google.cloud.automl_v1.proto import dataset_pb2 +from google.cloud.automl_v1.proto import io_pb2 +from google.cloud.automl_v1.proto import model_evaluation_pb2 +from google.cloud.automl_v1.proto import model_pb2 +from google.cloud.automl_v1.proto import service_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestAutoMlClient(object): + def test_create_dataset(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + description = "description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_create_dataset", done=True + ) + 
operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + dataset = {} + + response = client.create_dataset(parent, dataset) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.CreateDatasetRequest( + parent=parent, dataset=dataset + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_dataset_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_create_dataset_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + dataset = {} + + response = client.create_dataset(parent, dataset) + exception = response.exception() + assert exception.errors[0] == error + + def test_update_dataset(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + description = "description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = 
mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + dataset = {} + update_mask = {} + + response = client.update_dataset(dataset, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.UpdateDatasetRequest( + dataset=dataset, update_mask=update_mask + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_dataset_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + dataset = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_dataset(dataset, update_mask) + + def test_get_dataset(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + description = "description-1724546052" + example_count = 1517063674 + etag = "etag3123477" + expected_response = { + "name": name_2, + "display_name": display_name, + "description": description, + "example_count": example_count, + "etag": etag, + } + expected_response = dataset_pb2.Dataset(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.get_dataset(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetDatasetRequest(name=name) + actual_request = 
channel.requests[0][1] + assert expected_request == actual_request + + def test_get_dataset_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + with pytest.raises(CustomException): + client.get_dataset(name) + + def test_list_datasets(self): + # Setup Expected Response + next_page_token = "" + datasets_element = {} + datasets = [datasets_element] + expected_response = {"next_page_token": next_page_token, "datasets": datasets} + expected_response = service_pb2.ListDatasetsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_datasets(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.datasets[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListDatasetsRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_datasets_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_datasets(parent) + with pytest.raises(CustomException): + 
list(paged_list_response) + + def test_delete_dataset(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_delete_dataset", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.delete_dataset(name) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.DeleteDatasetRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_dataset_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_delete_dataset_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + + response = client.delete_dataset(name) + exception = response.exception() + assert exception.errors[0] == error + + def test_import_data(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_import_data", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + 
patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + input_config = {} + + response = client.import_data(name, input_config) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.ImportDataRequest( + name=name, input_config=input_config + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_import_data_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_import_data_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + input_config = {} + + response = client.import_data(name, input_config) + exception = response.exception() + assert exception.errors[0] == error + + def test_export_data(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_export_data", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + output_config = {} + + response = 
client.export_data(name, output_config) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.ExportDataRequest( + name=name, output_config=output_config + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_export_data_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_export_data_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.dataset_path("[PROJECT]", "[LOCATION]", "[DATASET]") + output_config = {} + + response = client.export_data(name, output_config) + exception = response.exception() + assert exception.errors[0] == error + + def test_create_model(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name, + "display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_create_model", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + model = {} + + response = client.create_model(parent, model) + result = response.result() + assert expected_response == result + + assert len(channel.requests) 
== 1 + expected_request = service_pb2.CreateModelRequest(parent=parent, model=model) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_model_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_create_model_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + model = {} + + response = client.create_model(parent, model) + exception = response.exception() + assert exception.errors[0] == error + + def test_get_model(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name_2, + "display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.get_model(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetModelRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_model_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with 
patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + with pytest.raises(CustomException): + client.get_model(name) + + def test_update_model(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + dataset_id = "datasetId-2115646910" + expected_response = { + "name": name, + "display_name": display_name, + "dataset_id": dataset_id, + } + expected_response = model_pb2.Model(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + model = {} + update_mask = {} + + response = client.update_model(model, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.UpdateModelRequest( + model=model, update_mask=update_mask + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_model_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + model = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_model(model, update_mask) + + def test_list_models(self): + # Setup Expected Response + next_page_token = "" + model_element = {} + model = [model_element] + expected_response = {"next_page_token": next_page_token, "model": model} + expected_response = service_pb2.ListModelsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) 
+ patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_models(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.model[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListModelsRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_models_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_models(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_delete_model(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_delete_model", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.delete_model(name) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = service_pb2.DeleteModelRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == 
actual_request + + def test_delete_model_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_delete_model_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + + response = client.delete_model(name) + exception = response.exception() + assert exception.errors[0] == error + + def test_get_model_evaluation(self): + # Setup Expected Response + name_2 = "name2-1052831874" + annotation_spec_id = "annotationSpecId60690191" + evaluated_example_count = 277565350 + expected_response = { + "name": name_2, + "annotation_spec_id": annotation_spec_id, + "evaluated_example_count": evaluated_example_count, + } + expected_response = model_evaluation_pb2.ModelEvaluation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + name = client.model_evaluation_path( + "[PROJECT]", "[LOCATION]", "[MODEL]", "[MODEL_EVALUATION]" + ) + + response = client.get_model_evaluation(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = service_pb2.GetModelEvaluationRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_model_evaluation_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as 
create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + name = client.model_evaluation_path( + "[PROJECT]", "[LOCATION]", "[MODEL]", "[MODEL_EVALUATION]" + ) + + with pytest.raises(CustomException): + client.get_model_evaluation(name) + + def test_list_model_evaluations(self): + # Setup Expected Response + next_page_token = "" + model_evaluation_element = {} + model_evaluation = [model_evaluation_element] + expected_response = { + "next_page_token": next_page_token, + "model_evaluation": model_evaluation, + } + expected_response = service_pb2.ListModelEvaluationsResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup Request + parent = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + filter_ = "filter-1274492040" + + paged_list_response = client.list_model_evaluations(parent, filter_) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.model_evaluation[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = service_pb2.ListModelEvaluationsRequest( + parent=parent, filter=filter_ + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_model_evaluations_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = automl_v1.AutoMlClient() + + # Setup request + parent = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + filter_ = "filter-1274492040" + + paged_list_response = client.list_model_evaluations(parent, filter_) + with pytest.raises(CustomException): + 
list(paged_list_response) diff --git a/videointelligence/tests/unit/gapic/v1beta1/test_video_intelligence_service_client_v1beta1.py b/automl/tests/unit/gapic/v1/test_prediction_service_client_v1.py similarity index 53% rename from videointelligence/tests/unit/gapic/v1beta1/test_video_intelligence_service_client_v1beta1.py rename to automl/tests/unit/gapic/v1/test_prediction_service_client_v1.py index 27926f1cf767..02d12f0ad4d3 100644 --- a/videointelligence/tests/unit/gapic/v1beta1/test_video_intelligence_service_client_v1beta1.py +++ b/automl/tests/unit/gapic/v1/test_prediction_service_client_v1.py @@ -19,12 +19,9 @@ import mock import pytest -from google.rpc import status_pb2 - -from google.cloud import videointelligence_v1beta1 -from google.cloud.videointelligence_v1beta1 import enums -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2 -from google.longrunning import operations_pb2 +from google.cloud import automl_v1 +from google.cloud.automl_v1.proto import data_items_pb2 +from google.cloud.automl_v1.proto import prediction_service_pb2 class MultiCallableStub(object): @@ -63,61 +60,44 @@ class CustomException(Exception): pass -class TestVideoIntelligenceServiceClient(object): - def test_annotate_video(self): +class TestPredictionServiceClient(object): + def test_predict(self): # Setup Expected Response expected_response = {} - expected_response = video_intelligence_pb2.AnnotateVideoResponse( - **expected_response - ) - operation = operations_pb2.Operation( - name="operations/test_annotate_video", done=True - ) - operation.response.Pack(expected_response) + expected_response = prediction_service_pb2.PredictResponse(**expected_response) # Mock the API response - channel = ChannelStub(responses=[operation]) + channel = ChannelStub(responses=[expected_response]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel - client = 
videointelligence_v1beta1.VideoIntelligenceServiceClient() + client = automl_v1.PredictionServiceClient() # Setup Request - input_uri = "gs://cloud-samples-data/video/cat.mp4" - features_element = enums.Feature.LABEL_DETECTION - features = [features_element] + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + payload = {} - response = client.annotate_video(input_uri, features) - result = response.result() - assert expected_response == result + response = client.predict(name, payload) + assert expected_response == response assert len(channel.requests) == 1 - expected_request = video_intelligence_pb2.AnnotateVideoRequest( - input_uri=input_uri, features=features + expected_request = prediction_service_pb2.PredictRequest( + name=name, payload=payload ) actual_request = channel.requests[0][1] assert expected_request == actual_request - def test_annotate_video_exception(self): - # Setup Response - error = status_pb2.Status() - operation = operations_pb2.Operation( - name="operations/test_annotate_video_exception", done=True - ) - operation.error.CopyFrom(error) - + def test_predict_exception(self): # Mock the API response - channel = ChannelStub(responses=[operation]) + channel = ChannelStub(responses=[CustomException()]) patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel - client = videointelligence_v1beta1.VideoIntelligenceServiceClient() + client = automl_v1.PredictionServiceClient() - # Setup Request - input_uri = "gs://cloud-samples-data/video/cat.mp4" - features_element = enums.Feature.LABEL_DETECTION - features = [features_element] + # Setup request + name = client.model_path("[PROJECT]", "[LOCATION]", "[MODEL]") + payload = {} - response = client.annotate_video(input_uri, features) - exception = response.exception() - assert exception.errors[0] == error + with pytest.raises(CustomException): + client.predict(name, payload) diff --git 
a/automl/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py b/automl/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py index 49d4a0f85423..f7a2e27ab7d8 100644 --- a/automl/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py +++ b/automl/tests/unit/gapic/v1beta1/test_gcs_client_v1beta1.py @@ -22,8 +22,11 @@ import re from google.api_core import exceptions +from google.auth.credentials import AnonymousCredentials from google.cloud import automl_v1beta1 +PROJECT = "project" + class TestGcsClient(object): def gcs_client(self, bucket_name=None, client_attrs={}): @@ -32,6 +35,24 @@ def gcs_client(self, bucket_name=None, client_attrs={}): bucket_name=bucket_name, client=client_mock ) + def test_init_with_project_and_credentials(self): + # helper for checking that the storage client is initialized with the + # passed in project and credentials. + class FakeStorageClient: + def __init__(self, project=None, credentials=None): + self.project = project + self.credentials = credentials + + patch = mock.patch("google.cloud.storage.Client", new=FakeStorageClient) + with patch: + credentials = AnonymousCredentials() + gcs_client = automl_v1beta1.tables.gcs_client.GcsClient( + project=PROJECT, credentials=credentials + ) + assert isinstance(gcs_client.client, FakeStorageClient) + assert gcs_client.client.project == PROJECT + assert gcs_client.client.credentials == credentials + def test_ensure_bucket_exists(self): mock_bucket = mock.Mock() gcs_client = self.gcs_client( diff --git a/automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py b/automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py index aa1babfa8752..516a4b76080d 100644 --- a/automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py +++ b/automl/tests/unit/gapic/v1beta1/test_tables_client_v1beta1.py @@ -20,8 +20,9 @@ import pandas import pytest -from google.cloud import automl_v1beta1 from google.api_core import exceptions +from google.auth.credentials import AnonymousCredentials +from google.cloud 
import automl_v1beta1 from google.cloud.automl_v1beta1.proto import data_types_pb2 PROJECT = "project" @@ -214,6 +215,33 @@ def test_import_pandas_dataframe(self): "name", {"gcs_source": {"input_uris": ["uri"]}} ) + def test_import_pandas_dataframe_init_gcs(self): + client = automl_v1beta1.TablesClient( + client=mock.Mock(), + prediction_client=mock.Mock(), + project=PROJECT, + region=REGION, + credentials=AnonymousCredentials(), + ) + + dataframe = pandas.DataFrame({}) + patch = mock.patch( + "google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient", + bucket_name="my_bucket", + ) + with patch as MockGcsClient: + mockInstance = MockGcsClient.return_value + mockInstance.upload_pandas_dataframe.return_value = "uri" + + client.import_data(dataset_name="name", pandas_dataframe=dataframe) + + assert client.gcs_client is mockInstance + client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) + client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) + client.auto_ml_client.import_data.assert_called_with( + "name", {"gcs_source": {"input_uris": ["uri"]}} + ) + def test_import_gcs_uri(self): client = self.tables_client({"import_data.return_value": None}, {}) client.import_data(dataset_name="name", gcs_input_uris="uri") @@ -1220,6 +1248,40 @@ def test_batch_predict_pandas_dataframe(self): {"gcs_destination": {"output_uri_prefix": "gs://output"}}, ) + def test_batch_predict_pandas_dataframe_init_gcs(self): + client = automl_v1beta1.TablesClient( + client=mock.Mock(), + prediction_client=mock.Mock(), + project=PROJECT, + region=REGION, + credentials=AnonymousCredentials(), + ) + + dataframe = pandas.DataFrame({}) + patch = mock.patch( + "google.cloud.automl_v1beta1.tables.tables_client.gcs_client.GcsClient", + bucket_name="my_bucket", + ) + with patch as MockGcsClient: + mockInstance = MockGcsClient.return_value + mockInstance.upload_pandas_dataframe.return_value = "gs://input" + + dataframe = pandas.DataFrame({}) + 
client.batch_predict( + model_name="my_model", + pandas_dataframe=dataframe, + gcs_output_uri_prefix="gs://output", + ) + + client.gcs_client.ensure_bucket_exists.assert_called_with(PROJECT, REGION) + client.gcs_client.upload_pandas_dataframe.assert_called_with(dataframe) + + client.prediction_client.batch_predict.assert_called_with( + "my_model", + {"gcs_source": {"input_uris": ["gs://input"]}}, + {"gcs_destination": {"output_uri_prefix": "gs://output"}}, + ) + def test_batch_predict_gcs(self): client = self.tables_client({}, {}) client.batch_predict( @@ -1317,3 +1379,25 @@ def test_batch_predict_no_model(self): ) client.auto_ml_client.list_models.assert_not_called() client.prediction_client.batch_predict.assert_not_called() + + def test_auto_ml_client_credentials(self): + credentials_mock = mock.Mock() + patch_auto_ml_client = mock.patch( + "google.cloud.automl_v1beta1.gapic.auto_ml_client.AutoMlClient" + ) + with patch_auto_ml_client as MockAutoMlClient: + client = automl_v1beta1.TablesClient(credentials=credentials_mock) + _, auto_ml_client_kwargs = MockAutoMlClient.call_args + assert "credentials" in auto_ml_client_kwargs + assert auto_ml_client_kwargs["credentials"] == credentials_mock + + def test_prediction_client_credentials(self): + credentials_mock = mock.Mock() + patch_prediction_client = mock.patch( + "google.cloud.automl_v1beta1.gapic.prediction_service_client.PredictionServiceClient" + ) + with patch_prediction_client as MockPredictionClient: + client = automl_v1beta1.TablesClient(credentials=credentials_mock) + _, prediction_client_kwargs = MockPredictionClient.call_args + assert "credentials" in prediction_client_kwargs + assert prediction_client_kwargs["credentials"] == credentials_mock diff --git a/bigquery/CHANGELOG.md b/bigquery/CHANGELOG.md index 9170d004ecc7..1560e456a24e 100644 --- a/bigquery/CHANGELOG.md +++ b/bigquery/CHANGELOG.md @@ -4,6 +4,39 @@ [1]: https://pypi.org/project/google-cloud-bigquery/#history +## 1.21.0 + +10-16-2019 10:33 
PDT + + +### New Features + +- add ability to pass in a table ID instead of a query to the `%%bigquery` magic ([#9170](https://github.com/googleapis/google-cloud-python/pull/9170)) +- add support for custom `QueryJobConfig` in `BigQuery.cursor.execute` method ([#9278](https://github.com/googleapis/google-cloud-python/pull/9278)) +- store `QueryJob` to destination var on error in `%%bigquery` magic ([#9245](https://github.com/googleapis/google-cloud-python/pull/9245)) +- add script statistics to job resource ([#9428](https://github.com/googleapis/google-cloud-python/pull/9428)) +- add support for sheets ranges ([#9416](https://github.com/googleapis/google-cloud-python/pull/9416)) +- add support for listing jobs by parent job ([#9225](https://github.com/googleapis/google-cloud-python/pull/9225)) +- expose customer managed encryption key for ML models ([#9302](https://github.com/googleapis/google-cloud-python/pull/9302)) +- add `Dataset.default_partition_expiration_ms` and `Table.require_partition_filter` properties ([#9464](https://github.com/googleapis/google-cloud-python/pull/9464)) + +### Dependencies + +- restrict version range of `google-resumable-media` ([#9243](https://github.com/googleapis/google-cloud-python/pull/9243)) + +### Documentation + +- document how to load data as JSON string ([#9231](https://github.com/googleapis/google-cloud-python/pull/9231)) +- standardize comments and formatting in existing code samples ([#9212](https://github.com/googleapis/google-cloud-python/pull/9212)) +- rewrite docstrings in Google style ([#9326](https://github.com/googleapis/google-cloud-python/pull/9326)) +- fix incorrect links to REST API in reference docs ([#9436](https://github.com/googleapis/google-cloud-python/pull/9436)) + +### Internal / Testing Changes + +- add code samples to lint check ([#9277](https://github.com/googleapis/google-cloud-python/pull/9277)) +- update code samples to use strings for table and dataset IDs 
([#9136](https://github.com/googleapis/google-cloud-python/pull/9136)) +- simplify scripting system test to reduce flakiness ([#9458](https://github.com/googleapis/google-cloud-python/pull/9458)) + ## 1.20.0 09-13-2019 11:22 PDT diff --git a/bigquery/docs/reference.rst b/bigquery/docs/reference.rst index e01443808795..981059de5226 100644 --- a/bigquery/docs/reference.rst +++ b/bigquery/docs/reference.rst @@ -83,12 +83,13 @@ Table .. autosummary:: :toctree: generated + table.PartitionRange + table.RangePartitioning + table.Row + table.RowIterator table.Table table.TableListItem table.TableReference - table.Row - table.RowIterator - table.EncryptionConfiguration table.TimePartitioning table.TimePartitioningType @@ -173,6 +174,13 @@ Enums enums.StandardSqlDataTypes +Encryption Configuration +======================== + +.. autosummary:: + :toctree: generated + + encryption_configuration.EncryptionConfiguration Additional Types ================ diff --git a/bigquery/docs/snippets.py b/bigquery/docs/snippets.py index 4c39ff912230..83795460a955 100644 --- a/bigquery/docs/snippets.py +++ b/bigquery/docs/snippets.py @@ -13,11 +13,9 @@ # limitations under the License. """Testable usage examples for Google BigQuery API wrapper - Each example function takes a ``client`` argument (which must be an instance of :class:`google.cloud.bigquery.client.Client`) and uses it to perform a task with the API. - To facilitate running the examples as system tests, each example is also passed a ``to_delete`` list; the function adds to the list any objects created which need to be deleted during teardown. @@ -181,7 +179,7 @@ def test_create_table_cmek(client, to_delete): # Set the encryption key to use for the table. # TODO: Replace this key with a key you have created in Cloud KMS. 
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) table.encryption_configuration = bigquery.EncryptionConfiguration( kms_key_name=kms_key_name @@ -303,47 +301,6 @@ def test_load_and_query_partitioned_table(client, to_delete): assert len(rows) == 29 -# [START bigquery_table_exists] -def table_exists(client, table_reference): - """Return if a table exists. - - Args: - client (google.cloud.bigquery.client.Client): - A client to connect to the BigQuery API. - table_reference (google.cloud.bigquery.table.TableReference): - A reference to the table to look for. - - Returns: - bool: ``True`` if the table exists, ``False`` otherwise. - """ - from google.cloud.exceptions import NotFound - - try: - client.get_table(table_reference) - return True - except NotFound: - return False - - -# [END bigquery_table_exists] - - -def test_table_exists(client, to_delete): - """Determine if a table exists.""" - DATASET_ID = "get_table_dataset_{}".format(_millis()) - TABLE_ID = "get_table_table_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(DATASET_ID)) - dataset = client.create_dataset(dataset) - to_delete.append(dataset) - - table_ref = dataset.table(TABLE_ID) - table = bigquery.Table(table_ref, schema=SCHEMA) - table = client.create_table(table) - - assert table_exists(client, table_ref) - assert not table_exists(client, dataset.table("i_dont_exist")) - - @pytest.mark.skip( reason=( "update_table() is flaky " @@ -543,7 +500,7 @@ def test_update_table_cmek(client, to_delete): table = bigquery.Table(dataset.table(table_id)) original_kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) table.encryption_configuration = bigquery.EncryptionConfiguration( kms_key_name=original_kms_key_name @@ -559,8 +516,7 @@ def 
test_update_table_cmek(client, to_delete): # Set a new encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. updated_kms_key_name = ( - "projects/cloud-samples-tests/locations/us-central1/" - "keyRings/test/cryptoKeys/otherkey" + "projects/cloud-samples-tests/locations/us/keyRings/test/cryptoKeys/otherkey" ) table.encryption_configuration = bigquery.EncryptionConfiguration( kms_key_name=updated_kms_key_name @@ -698,36 +654,6 @@ def test_manage_views(client, to_delete): # [END bigquery_grant_view_access] -def test_table_insert_rows(client, to_delete): - """Insert / fetch table data.""" - dataset_id = "table_insert_rows_dataset_{}".format(_millis()) - table_id = "table_insert_rows_table_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - dataset = client.create_dataset(dataset) - dataset.location = "US" - to_delete.append(dataset) - - table = bigquery.Table(dataset.table(table_id), schema=SCHEMA) - table = client.create_table(table) - - # [START bigquery_table_insert_rows] - # TODO(developer): Uncomment the lines below and replace with your values. 
- # from google.cloud import bigquery - # client = bigquery.Client() - # dataset_id = 'my_dataset' # replace with your dataset ID - # For this sample, the table must already exist and have a defined schema - # table_id = 'my_table' # replace with your table ID - # table_ref = client.dataset(dataset_id).table(table_id) - # table = client.get_table(table_ref) # API request - - rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] - - errors = client.insert_rows(table, rows_to_insert) # API request - - assert errors == [] - # [END bigquery_table_insert_rows] - - def test_load_table_from_file(client, to_delete): """Upload table data from a CSV file.""" dataset_id = "load_table_from_file_dataset_{}".format(_millis()) @@ -904,7 +830,7 @@ def test_load_table_from_uri_cmek(client, to_delete): # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config.destination_encryption_configuration = encryption_config @@ -993,12 +919,10 @@ def test_load_table_from_uri_orc(client, to_delete, capsys): def test_load_table_from_uri_autodetect(client, to_delete, capsys): """Load table from a GCS URI using various formats and auto-detected schema - Each file format has its own tested load from URI sample. Because most of the code is common for autodetect, append, and truncate, this sample includes snippets for all supported formats but only calls a single load job. - This code snippet is made up of shared code, then format-specific code, followed by more shared code. Note that only the last format in the format-specific code section will be tested in this test. 
@@ -1058,12 +982,10 @@ def test_load_table_from_uri_autodetect(client, to_delete, capsys): def test_load_table_from_uri_truncate(client, to_delete, capsys): """Replaces table data with data from a GCS URI using various formats - Each file format has its own tested load from URI sample. Because most of the code is common for autodetect, append, and truncate, this sample includes snippets for all supported formats but only calls a single load job. - This code snippet is made up of shared code, then format-specific code, followed by more shared code. Note that only the last format in the format-specific code section will be tested in this test. @@ -1303,38 +1225,6 @@ def test_load_table_relax_column(client, to_delete): assert table.num_rows > 0 -def test_copy_table(client, to_delete): - dataset_id = "copy_table_dataset_{}".format(_millis()) - dest_dataset = bigquery.Dataset(client.dataset(dataset_id)) - dest_dataset.location = "US" - dest_dataset = client.create_dataset(dest_dataset) - to_delete.append(dest_dataset) - - # [START bigquery_copy_table] - # from google.cloud import bigquery - # client = bigquery.Client() - - source_dataset = client.dataset("samples", project="bigquery-public-data") - source_table_ref = source_dataset.table("shakespeare") - - # dataset_id = 'my_dataset' - dest_table_ref = client.dataset(dataset_id).table("destination_table") - - job = client.copy_table( - source_table_ref, - dest_table_ref, - # Location must match that of the source and destination tables. - location="US", - ) # API request - - job.result() # Waits for job to complete. 
- - assert job.state == "DONE" - dest_table = client.get_table(dest_table_ref) # API request - assert dest_table.num_rows > 0 - # [END bigquery_copy_table] - - def test_copy_table_multiple_source(client, to_delete): dest_dataset_id = "dest_dataset_{}".format(_millis()) dest_dataset = bigquery.Dataset(client.dataset(dest_dataset_id)) @@ -1414,7 +1304,7 @@ def test_copy_table_cmek(client, to_delete): # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config = bigquery.CopyJobConfig() @@ -1601,31 +1491,6 @@ def test_undelete_table(client, to_delete): # [END bigquery_undelete_table] -def test_client_query(client): - """Run a simple query.""" - - # [START bigquery_query] - # from google.cloud import bigquery - # client = bigquery.Client() - - query = ( - "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` " - 'WHERE state = "TX" ' - "LIMIT 100" - ) - query_job = client.query( - query, - # Location must match that of the dataset(s) referenced in the query. - location="US", - ) # API request - starts the query - - for row in query_job: # API request - fetches results - # Row values can be accessed by field name or index - assert row[0] == row.name == row["name"] - print(row) - # [END bigquery_query] - - def test_client_query_legacy_sql(client): """Run a query with Legacy SQL explicitly set""" # [START bigquery_query_legacy] @@ -1819,7 +1684,7 @@ def test_client_query_destination_table_cmek(client, to_delete): # Set the encryption key to use for the destination. # TODO: Replace this key with a key you have created in KMS. 
kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format( - "cloud-samples-tests", "us-central1", "test", "test" + "cloud-samples-tests", "us", "test", "test" ) encryption_config = bigquery.EncryptionConfiguration(kms_key_name=kms_key_name) job_config.destination_encryption_configuration = encryption_config @@ -2303,108 +2168,6 @@ def test_query_external_gcs_permanent_table(client, to_delete): assert len(w_states) == 4 -def test_query_external_sheets_temporary_table(client): - # [START bigquery_query_external_sheets_temp] - # [START bigquery_auth_drive_scope] - import google.auth - - # from google.cloud import bigquery - - # Create credentials with Drive & BigQuery API scopes - # Both APIs must be enabled for your project before running this code - credentials, project = google.auth.default( - scopes=[ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/bigquery", - ] - ) - client = bigquery.Client(credentials=credentials, project=project) - # [END bigquery_auth_drive_scope] - - # Configure the external data source and query job - external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") - # Use a shareable link or grant viewing access to the email address you - # used to authenticate with BigQuery (this example Sheet is public) - sheet_url = ( - "https://docs.google.com/spreadsheets" - "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" - ) - external_config.source_uris = [sheet_url] - external_config.schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - external_config.options.skip_leading_rows = 1 # optionally skip header row - table_id = "us_states" - job_config = bigquery.QueryJobConfig() - job_config.table_definitions = {table_id: external_config} - - # Example query to find states starting with 'W' - sql = 'SELECT * FROM `{}` WHERE name LIKE "W%"'.format(table_id) - - query_job = client.query(sql, job_config=job_config) # API request - - w_states = 
list(query_job) # Waits for query to finish - print("There are {} states with names starting with W.".format(len(w_states))) - # [END bigquery_query_external_sheets_temp] - assert len(w_states) == 4 - - -def test_query_external_sheets_permanent_table(client, to_delete): - dataset_id = "query_external_sheets_{}".format(_millis()) - dataset = bigquery.Dataset(client.dataset(dataset_id)) - client.create_dataset(dataset) - to_delete.append(dataset) - - # [START bigquery_query_external_sheets_perm] - import google.auth - - # from google.cloud import bigquery - # dataset_id = 'my_dataset' - - # Create credentials with Drive & BigQuery API scopes - # Both APIs must be enabled for your project before running this code - credentials, project = google.auth.default( - scopes=[ - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/bigquery", - ] - ) - client = bigquery.Client(credentials=credentials, project=project) - - # Configure the external data source - dataset_ref = client.dataset(dataset_id) - table_id = "us_states" - schema = [ - bigquery.SchemaField("name", "STRING"), - bigquery.SchemaField("post_abbr", "STRING"), - ] - table = bigquery.Table(dataset_ref.table(table_id), schema=schema) - external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") - # Use a shareable link or grant viewing access to the email address you - # used to authenticate with BigQuery (this example Sheet is public) - sheet_url = ( - "https://docs.google.com/spreadsheets" - "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" - ) - external_config.source_uris = [sheet_url] - external_config.options.skip_leading_rows = 1 # optionally skip header row - table.external_data_configuration = external_config - - # Create a permanent table linked to the Sheets file - table = client.create_table(table) # API request - - # Example query to find states starting with 'W' - sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id) - - query_job = 
client.query(sql) # API request - - w_states = list(query_job) # Waits for query to finish - print("There are {} states with names starting with W.".format(len(w_states))) - # [END bigquery_query_external_sheets_perm] - assert len(w_states) == 4 - - def test_ddl_create_view(client, to_delete, capsys): """Create a view via a DDL query.""" project = client.project @@ -2462,42 +2225,6 @@ def test_ddl_create_view(client, to_delete, capsys): assert len(df) == 0 -def test_client_list_jobs(client): - """List jobs for a project.""" - - # [START bigquery_list_jobs] - # TODO(developer): Uncomment the lines below and replace with your values. - # from google.cloud import bigquery - # project = 'my_project' # replace with your project ID - # client = bigquery.Client(project=project) - import datetime - - # List the 10 most recent jobs in reverse chronological order. - # Omit the max_results parameter to list jobs from the past 6 months. - print("Last 10 jobs:") - for job in client.list_jobs(max_results=10): # API request(s) - print(job.job_id) - - # The following are examples of additional optional parameters: - - # Use min_creation_time and/or max_creation_time to specify a time window. - print("Jobs from the last ten minutes:") - ten_mins_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=10) - for job in client.list_jobs(min_creation_time=ten_mins_ago): - print(job.job_id) - - # Use all_users to include jobs run by all users in the project. - print("Last 10 jobs run by all users:") - for job in client.list_jobs(max_results=10, all_users=True): - print("{} run by user: {}".format(job.job_id, job.user_email)) - - # Use state_filter to filter by job state. 
- print("Jobs currently running:") - for job in client.list_jobs(state_filter="RUNNING"): - print(job.job_id) - # [END bigquery_list_jobs] - - @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_query_results_as_dataframe(client): # [START bigquery_query_results_dataframe] diff --git a/bigquery/docs/usage/jobs.rst b/bigquery/docs/usage/jobs.rst index 914d1d459ee7..c3dd71031bfc 100644 --- a/bigquery/docs/usage/jobs.rst +++ b/bigquery/docs/usage/jobs.rst @@ -1,9 +1,6 @@ Managing Jobs ~~~~~~~~~~~~~ -List jobs for a project -^^^^^^^^^^^^^^^^^^^^^^^ - Jobs describe actions performed on data in BigQuery tables: - Load data into a table @@ -11,7 +8,13 @@ Jobs describe actions performed on data in BigQuery tables: - Extract data from a table - Copy a table -.. literalinclude:: ../snippets.py +Listing jobs +^^^^^^^^^^^^ + +List jobs for a project with the +:func:`~google.cloud.bigquery.client.Client.list_jobs` method: + +.. literalinclude:: ../samples/client_list_jobs.py :language: python :dedent: 4 :start-after: [START bigquery_list_jobs] diff --git a/bigquery/docs/usage/queries.rst b/bigquery/docs/usage/queries.rst index fc77bb5b80cd..5c9dbe18fa63 100644 --- a/bigquery/docs/usage/queries.rst +++ b/bigquery/docs/usage/queries.rst @@ -4,9 +4,10 @@ Running Queries Querying data ^^^^^^^^^^^^^ -Run a query and wait for it to finish: +Run a query and wait for it to finish with the +:func:`~google.cloud.bigquery.client.Client.query` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/client_query.py :language: python :dedent: 4 :start-after: [START bigquery_query] @@ -47,3 +48,16 @@ See BigQuery documentation for more information on :dedent: 4 :start-after: [START bigquery_query_params_named] :end-before: [END bigquery_query_params_named] + +Run a script +^^^^^^^^^^^^ + +See BigQuery documentation for more information on `scripting in BigQuery +standard SQL +`_. + +.. 
literalinclude:: ../samples/query_script.py + :language: python + :dedent: 4 + :start-after: [START bigquery_query_script] + :end-before: [END bigquery_query_script] diff --git a/bigquery/docs/usage/tables.rst b/bigquery/docs/usage/tables.rst index 458c5b0009ba..d58dcc5d9ac4 100644 --- a/bigquery/docs/usage/tables.rst +++ b/bigquery/docs/usage/tables.rst @@ -28,6 +28,15 @@ Get a table resource with the :start-after: [START bigquery_get_table] :end-before: [END bigquery_get_table] +Determine if a table exists with the +:func:`~google.cloud.bigquery.client.Client.get_table` method: + +.. literalinclude:: ../samples/table_exists.py + :language: python + :dedent: 4 + :start-after: [START bigquery_table_exists] + :end-before: [END bigquery_table_exists] + Browse data rows in a table with the :func:`~google.cloud.bigquery.client.Client.list_rows` method: @@ -107,12 +116,26 @@ Update a property in a table's metadata with the Insert rows into a table's data with the :func:`~google.cloud.bigquery.client.Client.insert_rows` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/table_insert_rows.py :language: python :dedent: 4 :start-after: [START bigquery_table_insert_rows] :end-before: [END bigquery_table_insert_rows] +Insert rows into a table's data with the +:func:`~google.cloud.bigquery.client.Client.insert_rows` method, achieving +higher write limit: + +.. literalinclude:: ../samples/table_insert_rows_explicit_none_insert_ids.py + :language: python + :dedent: 4 + :start-after: [START bigquery_table_insert_rows_explicit_none_insert_ids] + :end-before: [END bigquery_table_insert_rows_explicit_none_insert_ids] + +Mind that inserting data with ``None`` row insert IDs can come at the expense of +more duplicate inserts. See also: +`Streaming inserts `_. 
+ Add an empty column to the existing table with the :func:`~google.cloud.bigquery.update_table` method: @@ -128,7 +151,7 @@ Copying a Table Copy a table with the :func:`~google.cloud.bigquery.client.Client.copy_table` method: -.. literalinclude:: ../snippets.py +.. literalinclude:: ../samples/copy_table.py :language: python :dedent: 4 :start-after: [START bigquery_copy_table] diff --git a/bigquery/google/cloud/bigquery/__init__.py b/bigquery/google/cloud/bigquery/__init__.py index bda8c5611435..3982c1175850 100644 --- a/bigquery/google/cloud/bigquery/__init__.py +++ b/bigquery/google/cloud/bigquery/__init__.py @@ -73,12 +73,14 @@ from google.cloud.bigquery.routine import RoutineArgument from google.cloud.bigquery.routine import RoutineReference from google.cloud.bigquery.schema import SchemaField -from google.cloud.bigquery.table import EncryptionConfiguration +from google.cloud.bigquery.table import PartitionRange +from google.cloud.bigquery.table import RangePartitioning +from google.cloud.bigquery.table import Row from google.cloud.bigquery.table import Table from google.cloud.bigquery.table import TableReference -from google.cloud.bigquery.table import Row from google.cloud.bigquery.table import TimePartitioningType from google.cloud.bigquery.table import TimePartitioning +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration __all__ = [ "__version__", @@ -94,10 +96,14 @@ "DatasetReference", "AccessEntry", # Tables - "EncryptionConfiguration", "Table", "TableReference", + "PartitionRange", + "RangePartitioning", "Row", + "TimePartitioning", + "TimePartitioningType", + # Jobs "CopyJob", "CopyJobConfig", "ExtractJob", @@ -105,8 +111,6 @@ "LoadJob", "LoadJobConfig", "UnknownJob", - "TimePartitioningType", - "TimePartitioning", # Models "Model", "ModelReference", @@ -136,6 +140,8 @@ "StandardSqlDataTypes", "SourceFormat", "WriteDisposition", + # EncryptionConfiguration + "EncryptionConfiguration", ] diff --git 
a/bigquery/google/cloud/bigquery/_helpers.py b/bigquery/google/cloud/bigquery/_helpers.py index eb5161c9fe71..98eadb0a2f8e 100644 --- a/bigquery/google/cloud/bigquery/_helpers.py +++ b/bigquery/google/cloud/bigquery/_helpers.py @@ -90,12 +90,15 @@ def _timestamp_query_param_from_json(value, field): Args: value (str): The timestamp. - field (.SchemaField): The field corresponding to the value. + + field (google.cloud.bigquery.schema.SchemaField): + The field corresponding to the value. Returns: - Optional[datetime.datetime]: The parsed datetime object from - ``value`` if the ``field`` is not null (otherwise it is - :data:`None`). + Optional[datetime.datetime]: + The parsed datetime object from + ``value`` if the ``field`` is not null (otherwise it is + :data:`None`). """ if _not_null(value, field): # Canonical formats for timestamps in BigQuery are flexible. See: @@ -125,12 +128,14 @@ def _datetime_from_json(value, field): Args: value (str): The timestamp. - field (.SchemaField): The field corresponding to the value. + field (google.cloud.bigquery.schema.SchemaField): + The field corresponding to the value. Returns: - Optional[datetime.datetime]: The parsed datetime object from - ``value`` if the ``field`` is not null (otherwise it is - :data:`None`). + Optional[datetime.datetime]: + The parsed datetime object from + ``value`` if the ``field`` is not null (otherwise it is + :data:`None`). """ if _not_null(value, field): if "." in value: @@ -217,16 +222,20 @@ def _row_tuple_from_json(row, schema): Note: ``row['f']`` and ``schema`` are presumed to be of the same length. - :type row: dict - :param row: A JSON response row to be converted. - - :type schema: tuple - :param schema: A tuple of - :class:`~google.cloud.bigquery.schema.SchemaField`. + Args: + row (Dict): A JSON response row to be converted. + schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): Specification of the field types in ``row``. 
- :rtype: tuple - :returns: A tuple of data converted to native types. + Returns: + Tuple: A tuple of data converted to native types. """ + from google.cloud.bigquery.schema import _to_schema_fields + + schema = _to_schema_fields(schema) + row_data = [] for field, cell in zip(schema, row["f"]): row_data.append(_field_from_json(cell["v"], field)) @@ -234,9 +243,25 @@ def _row_tuple_from_json(row, schema): def _rows_from_json(values, schema): - """Convert JSON row data to rows with appropriate types.""" + """Convert JSON row data to rows with appropriate types. + + Args: + values (Sequence[Dict]): The list of responses (JSON rows) to convert. + schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + The table's schema. If any item is a mapping, its content must be + compatible with + :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. + + Returns: + List[:class:`~google.cloud.bigquery.Row`] + """ from google.cloud.bigquery import Row + from google.cloud.bigquery.schema import _to_schema_fields + schema = _to_schema_fields(schema) field_to_index = _field_to_index_mapping(schema) return [Row(_row_tuple_from_json(r, schema), field_to_index) for r in values] @@ -344,16 +369,13 @@ def _scalar_field_to_json(field, row_value): """Maps a field and value to a JSON-safe value. Args: - field ( \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ): + field (google.cloud.bigquery.schema.SchemaField): The SchemaField to use for type conversion and field name. - row_value (any): + row_value (Any): Value to be converted, based on the field's type. Returns: - any: - A JSON-serializable object. + Any: A JSON-serializable object. """ converter = _SCALAR_VALUE_TO_JSON_ROW.get(field.field_type) if converter is None: # STRING doesn't need converting @@ -365,17 +387,14 @@ def _repeated_field_to_json(field, row_value): """Convert a repeated/array field to its JSON representation. 
Args: - field ( \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ): + field (google.cloud.bigquery.schema.SchemaField): The SchemaField to use for type conversion and field name. The field mode must equal ``REPEATED``. - row_value (Sequence[any]): + row_value (Sequence[Any]): A sequence of values to convert to JSON-serializable values. Returns: - List[any]: - A list of JSON-serializable objects. + List[Any]: A list of JSON-serializable objects. """ # Remove the REPEATED, but keep the other fields. This allows us to process # each item as if it were a top-level field. @@ -391,17 +410,14 @@ def _record_field_to_json(fields, row_value): """Convert a record/struct field to its JSON representation. Args: - fields ( \ - Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`], \ - ): + fields (Sequence[google.cloud.bigquery.schema.SchemaField]): The :class:`~google.cloud.bigquery.schema.SchemaField`s of the record's subfields to use for type conversion and field names. row_value (Union[Tuple[Any], Mapping[str, Any]): A tuple or dictionary to convert to JSON-serializable values. Returns: - Mapping[str, any]: - A JSON-serializable dictionary. + Mapping[str, Any]: A JSON-serializable dictionary. """ record = {} isdict = isinstance(row_value, dict) @@ -420,22 +436,16 @@ def _field_to_json(field, row_value): """Convert a field into JSON-serializable values. Args: - field ( \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ): + field (google.cloud.bigquery.schema.SchemaField): The SchemaField to use for type conversion and field name. - row_value (Union[ \ - Sequence[list], \ - any, \ - ]): + row_value (Union[Sequence[List], Any]): Row data to be inserted. If the SchemaField's mode is REPEATED, assume this is a list. If not, the type is inferred from the SchemaField's field_type. Returns: - any: - A JSON-serializable object. + Any: A JSON-serializable object. 
""" if row_value is None: return None @@ -461,9 +471,9 @@ def _get_sub_prop(container, keys, default=None): This method works like ``dict.get(key)``, but for nested values. Arguments: - container (dict): + container (Dict): A dictionary which may contain other dictionaries as values. - keys (iterable): + keys (Iterable): A sequence of keys to attempt to get the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. If there is a dictionary there, the second key @@ -504,9 +514,9 @@ def _set_sub_prop(container, keys, value): """Set a nested value in a dictionary. Arguments: - container (dict): + container (Dict): A dictionary which may contain other dictionaries as values. - keys (iterable): + keys (Iterable): A sequence of keys to attempt to set the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. If there is a dictionary there, the second key @@ -547,9 +557,9 @@ def _del_sub_prop(container, keys): """Remove a nested key fro a dictionary. Arguments: - container (dict): + container (Dict): A dictionary which may contain other dictionaries as values. - keys (iterable): + keys (Iterable): A sequence of keys to attempt to clear the value for. Each item in the sequence represents a deeper nesting. The first key is for the top level. 
If there is a dictionary there, the second key @@ -658,3 +668,18 @@ def _build_resource_from_properties(obj, filter_fields): partial[filter_field] = obj._properties[filter_field] return partial + + +def _verify_job_config_type(job_config, expected_type, param_name="job_config"): + if not isinstance(job_config, expected_type): + msg = ( + "Expected an instance of {expected_type} class for the {param_name} parameter, " + "but received {param_name} = {job_config}" + ) + raise TypeError( + msg.format( + expected_type=expected_type.__name__, + param_name=param_name, + job_config=job_config, + ) + ) diff --git a/bigquery/google/cloud/bigquery/_http.py b/bigquery/google/cloud/bigquery/_http.py index dd0d9d01c9de..2ff4effefb76 100644 --- a/bigquery/google/cloud/bigquery/_http.py +++ b/bigquery/google/cloud/bigquery/_http.py @@ -22,11 +22,10 @@ class Connection(_http.JSONConnection): """A connection to Google BigQuery via the JSON REST API. - :type client: :class:`~google.cloud.bigquery.client.Client` - :param client: The client that owns the current connection. + Args: + client (google.cloud.bigquery.client.Client): The client that owns the current connection. - :type client_info: :class:`~google.api_core.client_info.ClientInfo` - :param client_info: (Optional) instance used to generate user agent. + client_info (google.api_core.client_info.ClientInfo): (Optional) instance used to generate user agent. 
""" DEFAULT_API_ENDPOINT = "https://bigquery.googleapis.com" diff --git a/bigquery/google/cloud/bigquery/_pandas_helpers.py b/bigquery/google/cloud/bigquery/_pandas_helpers.py index bfbaf92bbe38..6e91a9624b06 100644 --- a/bigquery/google/cloud/bigquery/_pandas_helpers.py +++ b/bigquery/google/cloud/bigquery/_pandas_helpers.py @@ -110,8 +110,35 @@ def pyarrow_timestamp(): "TIME": pyarrow_time, "TIMESTAMP": pyarrow_timestamp, } + ARROW_SCALAR_IDS_TO_BQ = { + # https://arrow.apache.org/docs/python/api/datatypes.html#type-classes + pyarrow.bool_().id: "BOOL", + pyarrow.int8().id: "INT64", + pyarrow.int16().id: "INT64", + pyarrow.int32().id: "INT64", + pyarrow.int64().id: "INT64", + pyarrow.uint8().id: "INT64", + pyarrow.uint16().id: "INT64", + pyarrow.uint32().id: "INT64", + pyarrow.uint64().id: "INT64", + pyarrow.float16().id: "FLOAT64", + pyarrow.float32().id: "FLOAT64", + pyarrow.float64().id: "FLOAT64", + pyarrow.time32("ms").id: "TIME", + pyarrow.time64("ns").id: "TIME", + pyarrow.timestamp("ns").id: "TIMESTAMP", + pyarrow.date32().id: "DATE", + pyarrow.date64().id: "DATETIME", # because millisecond resolution + pyarrow.binary().id: "BYTES", + pyarrow.string().id: "STRING", # also alias for pyarrow.utf8() + pyarrow.decimal128(38, scale=9).id: "NUMERIC", + # The exact decimal's scale and precision are not important, as only + # the type ID matters, and it's the same for all decimal128 instances. + } + else: # pragma: NO COVER BQ_TO_ARROW_SCALARS = {} # pragma: NO COVER + ARROW_SCALAR_IDS_TO_BQ = {} # pragma: NO COVER def bq_to_arrow_struct_data_type(field): @@ -130,7 +157,8 @@ def bq_to_arrow_data_type(field): """Return the Arrow data type, corresponding to a given BigQuery column. - Returns None if default Arrow type inspection should be used. + Returns: + None: if default Arrow type inspection should be used. 
""" if field.mode is not None and field.mode.upper() == "REPEATED": inner_type = bq_to_arrow_data_type( @@ -140,10 +168,11 @@ def bq_to_arrow_data_type(field): return pyarrow.list_(inner_type) return None - if field.field_type.upper() in schema._STRUCT_TYPES: + field_type_upper = field.field_type.upper() if field.field_type else "" + if field_type_upper in schema._STRUCT_TYPES: return bq_to_arrow_struct_data_type(field) - data_type_constructor = BQ_TO_ARROW_SCALARS.get(field.field_type.upper()) + data_type_constructor = BQ_TO_ARROW_SCALARS.get(field_type_upper) if data_type_constructor is None: return None return data_type_constructor() @@ -152,7 +181,8 @@ def bq_to_arrow_data_type(field): def bq_to_arrow_field(bq_field): """Return the Arrow field, corresponding to a given BigQuery column. - Returns None if the Arrow type cannot be determined. + Returns: + None: if the Arrow type cannot be determined. """ arrow_type = bq_to_arrow_data_type(bq_field) if arrow_type: @@ -166,7 +196,8 @@ def bq_to_arrow_field(bq_field): def bq_to_arrow_schema(bq_schema): """Return the Arrow schema, corresponding to a given BigQuery schema. - Returns None if any Arrow type cannot be determined. + Returns: + None: if any Arrow type cannot be determined. 
""" arrow_fields = [] for bq_field in bq_schema: @@ -180,9 +211,12 @@ def bq_to_arrow_schema(bq_schema): def bq_to_arrow_array(series, bq_field): arrow_type = bq_to_arrow_data_type(bq_field) + + field_type_upper = bq_field.field_type.upper() if bq_field.field_type else "" + if bq_field.mode.upper() == "REPEATED": return pyarrow.ListArray.from_pandas(series, type=arrow_type) - if bq_field.field_type.upper() in schema._STRUCT_TYPES: + if field_type_upper in schema._STRUCT_TYPES: return pyarrow.StructArray.from_pandas(series, type=arrow_type) return pyarrow.array(series, type=arrow_type) @@ -236,7 +270,10 @@ def dataframe_to_bq_schema(dataframe, bq_schema): Args: dataframe (pandas.DataFrame): DataFrame for which the client determines the BigQuery schema. - bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): A BigQuery schema. Use this argument to override the autodetected type for some or all of the DataFrame columns. @@ -246,6 +283,7 @@ def dataframe_to_bq_schema(dataframe, bq_schema): any column cannot be determined. """ if bq_schema: + bq_schema = schema._to_schema_fields(bq_schema) for field in bq_schema: if field.field_type in schema._STRUCT_TYPES: raise ValueError( @@ -260,6 +298,8 @@ def dataframe_to_bq_schema(dataframe, bq_schema): bq_schema_unused = set() bq_schema_out = [] + unknown_type_fields = [] + for column, dtype in list_columns_and_indexes(dataframe): # Use provided type from schema, if present. bq_field = bq_schema_index.get(column) @@ -271,12 +311,12 @@ def dataframe_to_bq_schema(dataframe, bq_schema): # Otherwise, try to automatically determine the type based on the # pandas dtype. 
bq_type = _PANDAS_DTYPE_TO_BQ.get(dtype.name) - if not bq_type: - warnings.warn(u"Unable to determine type of column '{}'.".format(column)) - return None bq_field = schema.SchemaField(column, bq_type) bq_schema_out.append(bq_field) + if bq_field.field_type is None: + unknown_type_fields.append(bq_field) + # Catch any schema mismatch. The developer explicitly asked to serialize a # column, but it was not found. if bq_schema_unused: @@ -285,7 +325,73 @@ def dataframe_to_bq_schema(dataframe, bq_schema): bq_schema_unused ) ) - return tuple(bq_schema_out) + + # If schema detection was not successful for all columns, also try with + # pyarrow, if available. + if unknown_type_fields: + if not pyarrow: + msg = u"Could not determine the type of columns: {}".format( + ", ".join(field.name for field in unknown_type_fields) + ) + warnings.warn(msg) + return None # We cannot detect the schema in full. + + # The augment_schema() helper itself will also issue unknown type + # warnings if detection still fails for any of the fields. + bq_schema_out = augment_schema(dataframe, bq_schema_out) + + return tuple(bq_schema_out) if bq_schema_out else None + + +def augment_schema(dataframe, current_bq_schema): + """Try to deduce the unknown field types and return an improved schema. + + This function requires ``pyarrow`` to run. If all the missing types still + cannot be detected, ``None`` is returned. If all types are already known, + a shallow copy of the given schema is returned. + + Args: + dataframe (pandas.DataFrame): + DataFrame for which some of the field types are still unknown. + current_bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): + A BigQuery schema for ``dataframe``. The types of some or all of + the fields may be ``None``. 
+ Returns: + Optional[Sequence[google.cloud.bigquery.schema.SchemaField]] + """ + augmented_schema = [] + unknown_type_fields = [] + + for field in current_bq_schema: + if field.field_type is not None: + augmented_schema.append(field) + continue + + arrow_table = pyarrow.array(dataframe[field.name]) + detected_type = ARROW_SCALAR_IDS_TO_BQ.get(arrow_table.type.id) + + if detected_type is None: + unknown_type_fields.append(field) + continue + + new_field = schema.SchemaField( + name=field.name, + field_type=detected_type, + mode=field.mode, + description=field.description, + fields=field.fields, + ) + augmented_schema.append(new_field) + + if unknown_type_fields: + warnings.warn( + u"Pyarrow could not determine the type of columns: {}.".format( + ", ".join(field.name for field in unknown_type_fields) + ) + ) + return None + + return augmented_schema def dataframe_to_arrow(dataframe, bq_schema): @@ -294,9 +400,12 @@ def dataframe_to_arrow(dataframe, bq_schema): Args: dataframe (pandas.DataFrame): DataFrame to convert to Arrow table. - bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): - Desired BigQuery schema. Number of columns must match number of - columns in the DataFrame. + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + Desired BigQuery schema. The number of columns must match the + number of columns in the DataFrame. Returns: pyarrow.Table: @@ -307,6 +416,8 @@ def dataframe_to_arrow(dataframe, bq_schema): column_and_index_names = set( name for name, _ in list_columns_and_indexes(dataframe) ) + + bq_schema = schema._to_schema_fields(bq_schema) bq_field_names = set(field.name for field in bq_schema) extra_fields = bq_field_names - column_and_index_names @@ -351,7 +462,10 @@ def dataframe_to_parquet(dataframe, bq_schema, filepath, parquet_compression="SN Args: dataframe (pandas.DataFrame): DataFrame to convert to Parquet file. 
- bq_schema (Sequence[google.cloud.bigquery.schema.SchemaField]): + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): Desired BigQuery schema. Number of columns must match number of columns in the DataFrame. filepath (str): @@ -365,6 +479,7 @@ def dataframe_to_parquet(dataframe, bq_schema, filepath, parquet_compression="SN if pyarrow is None: raise ValueError("pyarrow is required for BigQuery schema conversion.") + bq_schema = schema._to_schema_fields(bq_schema) arrow_table = dataframe_to_arrow(dataframe, bq_schema) pyarrow.parquet.write_table(arrow_table, filepath, compression=parquet_compression) @@ -380,13 +495,29 @@ def _tabledata_list_page_to_arrow(page, column_names, arrow_types): for column_index, arrow_type in enumerate(arrow_types): arrays.append(pyarrow.array(page._columns[column_index], type=arrow_type)) - return pyarrow.RecordBatch.from_arrays(arrays, column_names) + if isinstance(column_names, pyarrow.Schema): + return pyarrow.RecordBatch.from_arrays(arrays, schema=column_names) + return pyarrow.RecordBatch.from_arrays(arrays, names=column_names) + +def download_arrow_tabledata_list(pages, bq_schema): + """Use tabledata.list to construct an iterable of RecordBatches. -def download_arrow_tabledata_list(pages, schema): - """Use tabledata.list to construct an iterable of RecordBatches.""" - column_names = bq_to_arrow_schema(schema) or [field.name for field in schema] - arrow_types = [bq_to_arrow_data_type(field) for field in schema] + Args: + pages (Iterator[:class:`google.api_core.page_iterator.Page`]): + An iterator over the result pages. + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + A description of the fields in result pages. + Yields: + :class:`pyarrow.RecordBatch` + The next page of records as a ``pyarrow`` record batch. 
+ """ + bq_schema = schema._to_schema_fields(bq_schema) + column_names = bq_to_arrow_schema(bq_schema) or [field.name for field in bq_schema] + arrow_types = [bq_to_arrow_data_type(field) for field in bq_schema] for page in pages: yield _tabledata_list_page_to_arrow(page, column_names, arrow_types) @@ -407,9 +538,26 @@ def _tabledata_list_page_to_dataframe(page, column_names, dtypes): return pandas.DataFrame(columns, columns=column_names) -def download_dataframe_tabledata_list(pages, schema, dtypes): - """Use (slower, but free) tabledata.list to construct a DataFrame.""" - column_names = [field.name for field in schema] +def download_dataframe_tabledata_list(pages, bq_schema, dtypes): + """Use (slower, but free) tabledata.list to construct a DataFrame. + + Args: + pages (Iterator[:class:`google.api_core.page_iterator.Page`]): + An iterator over the result pages. + bq_schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + A description of the fields in result pages. + dtypes (Mapping[str, numpy.dtype]): + The types of columns in result data to hint construction of the + resulting DataFrame. Not all column types have to be specified. + Yields: + :class:`pandas.DataFrame` + The next page of records as a ``pandas.DataFrame`` record batch. 
+ """ + bq_schema = schema._to_schema_fields(bq_schema) + column_names = [field.name for field in bq_schema] for page in pages: yield _tabledata_list_page_to_dataframe(page, column_names, dtypes) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index f14444c0c48a..bae4359300f8 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -53,6 +53,7 @@ from google.cloud.bigquery._helpers import _record_field_to_json from google.cloud.bigquery._helpers import _str_or_none +from google.cloud.bigquery._helpers import _verify_job_config_type from google.cloud.bigquery._http import Connection from google.cloud.bigquery import _pandas_helpers from google.cloud.bigquery.dataset import Dataset @@ -95,14 +96,12 @@ class Project(object): """Wrapper for resource describing a BigQuery project. - :type project_id: str - :param project_id: Opaque ID of the project + Args: + project_id (str): Opaque ID of the project - :type numeric_id: int - :param numeric_id: Numeric ID of the project + numeric_id (int): Numeric ID of the project - :type friendly_name: str - :param friendly_name: Display name of the project + friendly_name (str): Display name of the project """ def __init__(self, project_id, numeric_id, friendly_name): @@ -146,7 +145,7 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. - client_options (Union[~google.api_core.client_options.ClientOptions, dict]): + client_options (Union[google.api_core.client_options.ClientOptions, Dict]): (Optional) Client options used to set user options on the client. API Endpoint should be set through client_options. 
@@ -230,25 +229,25 @@ def list_projects(self, max_results=None, page_token=None, retry=DEFAULT_RETRY): See https://cloud.google.com/bigquery/docs/reference/rest/v2/projects/list - :type max_results: int - :param max_results: (Optional) maximum number of projects to return, - If not passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: - (Optional) Token representing a cursor into the projects. If - not passed, the API will return the first page of projects. - The token marks the beginning of the iterator to be returned - and the value of the ``page_token`` can be accessed at - ``next_page_token`` of the - :class:`~google.api_core.page_iterator.HTTPIterator`. - - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. - - :rtype: :class:`~google.api_core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.bigquery.client.Project` - accessible to the current client. + Args: + max_results (int): + (Optional) maximum number of projects to return, + If not passed, defaults to a value set by the API. + + page_token (str): + (Optional) Token representing a cursor into the projects. If + not passed, the API will return the first page of projects. + The token marks the beginning of the iterator to be returned + and the value of the ``page_token`` can be accessed at + ``next_page_token`` of the + :class:`~google.api_core.page_iterator.HTTPIterator`. + + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. + + Returns: + google.api_core.page_iterator.Iterator: + Iterator of :class:`~google.cloud.bigquery.client.Project` + accessible to the current client. """ return page_iterator.HTTPIterator( client=self, @@ -284,7 +283,7 @@ def list_datasets( filter (str): Optional. An expression for filtering the results by label. For syntax, see - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list#filter. 
+ https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list#body.QUERY_PARAMETERS.filter max_results (int): Optional. Maximum number of datasets to return. page_token (str): @@ -299,8 +298,7 @@ def list_datasets( Returns: google.api_core.page_iterator.Iterator: - Iterator of - :class:`~google.cloud.bigquery.dataset.DatasetListItem`. + Iterator of :class:`~google.cloud.bigquery.dataset.DatasetListItem`. associated with the project. """ extra_params = {} @@ -327,15 +325,16 @@ def list_datasets( def dataset(self, dataset_id, project=None): """Construct a reference to a dataset. - :type dataset_id: str - :param dataset_id: ID of the dataset. + Args: + dataset_id (str): ID of the dataset. - :type project: str - :param project: (Optional) project ID for the dataset (defaults to - the project of the client). + project (str): + (Optional) project ID for the dataset (defaults to + the project of the client). - :rtype: :class:`google.cloud.bigquery.dataset.DatasetReference` - :returns: a new ``DatasetReference`` instance + Returns: + google.cloud.bigquery.dataset.DatasetReference: + a new ``DatasetReference`` instance. """ if project is None: project = self.project @@ -350,8 +349,8 @@ def create_dataset(self, dataset, exists_ok=False, retry=DEFAULT_RETRY): Args: dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A :class:`~google.cloud.bigquery.dataset.Dataset` to create. @@ -403,7 +402,7 @@ def create_routine(self, routine, exists_ok=False, retry=DEFAULT_RETRY): https://cloud.google.com/bigquery/docs/reference/rest/v2/routines/insert Args: - routine (:class:`~google.cloud.bigquery.routine.Routine`): + routine (google.cloud.bigquery.routine.Routine): A :class:`~google.cloud.bigquery.routine.Routine` to create. The dataset that the routine belongs to must already exist. 
exists_ok (bool): @@ -439,8 +438,8 @@ def create_table(self, table, exists_ok=False, retry=DEFAULT_RETRY): Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): A :class:`~google.cloud.bigquery.table.Table` to create. @@ -480,14 +479,14 @@ def get_dataset(self, dataset_ref, retry=DEFAULT_RETRY): Args: dataset_ref (Union[ \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset to fetch from the BigQuery API. If a string is passed in, this method attempts to create a dataset reference from a string using :func:`~google.cloud.bigquery.dataset.DatasetReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -507,19 +506,18 @@ def get_model(self, model_ref, retry=DEFAULT_RETRY): Args: model_ref (Union[ \ - :class:`~google.cloud.bigquery.model.ModelReference`, \ + google.cloud.bigquery.model.ModelReference, \ str, \ ]): A reference to the model to fetch from the BigQuery API. If a string is passed in, this method attempts to create a model reference from a string using :func:`google.cloud.bigquery.model.ModelReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: - google.cloud.bigquery.model.Model: - A ``Model`` instance. + google.cloud.bigquery.model.Model: A ``Model`` instance. 
""" if isinstance(model_ref, str): model_ref = ModelReference.from_string( @@ -534,15 +532,15 @@ def get_routine(self, routine_ref, retry=DEFAULT_RETRY): Args: routine_ref (Union[ \ - :class:`~google.cloud.bigquery.routine.Routine`, \ - :class:`~google.cloud.bigquery.routine.RoutineReference`, \ + google.cloud.bigquery.routine.Routine, \ + google.cloud.bigquery.routine.RoutineReference, \ str, \ ]): A reference to the routine to fetch from the BigQuery API. If a string is passed in, this method attempts to create a reference from a string using :func:`google.cloud.bigquery.routine.RoutineReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the API call. Returns: @@ -562,15 +560,15 @@ def get_table(self, table, retry=DEFAULT_RETRY): Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): A reference to the table to fetch from the BigQuery API. If a string is passed in, this method attempts to create a table reference from a string using :func:`google.cloud.bigquery.table.TableReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -743,8 +741,8 @@ def list_models( Args: dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset whose models to list from the @@ -761,7 +759,7 @@ def list_models( the value of the ``page_token`` can be accessed at ``next_page_token`` of the :class:`~google.api_core.page_iterator.HTTPIterator`. 
- retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -801,8 +799,8 @@ def list_routines( Args: dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset whose routines to list from the @@ -819,7 +817,7 @@ def list_routines( the value of the ``page_token`` can be accessed at ``next_page_token`` of the :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -859,8 +857,8 @@ def list_tables( Args: dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset whose tables to list from the @@ -877,7 +875,7 @@ def list_tables( the value of the ``page_token`` can be accessed at ``next_page_token`` of the :class:`~google.api_core.page_iterator.HTTPIterator`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -917,8 +915,8 @@ def delete_dataset( Args dataset (Union[ \ - :class:`~google.cloud.bigquery.dataset.Dataset`, \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ + google.cloud.bigquery.dataset.Dataset, \ + google.cloud.bigquery.dataset.DatasetReference, \ str, \ ]): A reference to the dataset to delete. If a string is passed @@ -929,7 +927,7 @@ def delete_dataset( (Optional) If True, delete all the tables in the dataset. If False and the dataset contains tables, the request will fail. Default is False. 
- retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. not_found_ok (bool): Defaults to ``False``. If ``True``, ignore "not found" errors @@ -963,15 +961,15 @@ def delete_model(self, model, retry=DEFAULT_RETRY, not_found_ok=False): Args: model (Union[ \ - :class:`~google.cloud.bigquery.model.Model`, \ - :class:`~google.cloud.bigquery.model.ModelReference`, \ + google.cloud.bigquery.model.Model, \ + google.cloud.bigquery.model.ModelReference, \ str, \ ]): A reference to the model to delete. If a string is passed in, this method attempts to create a model reference from a string using :func:`google.cloud.bigquery.model.ModelReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. not_found_ok (bool): Defaults to ``False``. If ``True``, ignore "not found" errors @@ -997,15 +995,15 @@ def delete_routine(self, routine, retry=DEFAULT_RETRY, not_found_ok=False): Args: model (Union[ \ - :class:`~google.cloud.bigquery.routine.Routine`, \ - :class:`~google.cloud.bigquery.routine.RoutineReference`, \ + google.cloud.bigquery.routine.Routine, \ + google.cloud.bigquery.routine.RoutineReference, \ str, \ ]): A reference to the routine to delete. If a string is passed in, this method attempts to create a routine reference from a string using :func:`google.cloud.bigquery.routine.RoutineReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. not_found_ok (bool): Defaults to ``False``. 
If ``True``, ignore "not found" errors @@ -1033,15 +1031,15 @@ def delete_table(self, table, retry=DEFAULT_RETRY, not_found_ok=False): Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): A reference to the table to delete. If a string is passed in, this method attempts to create a table reference from a string using :func:`google.cloud.bigquery.table.TableReference.from_string`. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. not_found_ok (bool): Defaults to ``False``. If ``True``, ignore "not found" errors @@ -1106,15 +1104,17 @@ def _get_query_results( def job_from_resource(self, resource): """Detect correct job type from resource and instantiate. - :type resource: dict - :param resource: one job resource from API response + Args: + resource (Dict): one job resource from API response - :rtype: One of: - :class:`google.cloud.bigquery.job.LoadJob`, - :class:`google.cloud.bigquery.job.CopyJob`, - :class:`google.cloud.bigquery.job.ExtractJob`, - or :class:`google.cloud.bigquery.job.QueryJob` - :returns: the job instance, constructed via the resource + Returns: + Union[ \ + google.cloud.bigquery.job.LoadJob, \ + google.cloud.bigquery.job.CopyJob, \ + google.cloud.bigquery.job.ExtractJob, \ + google.cloud.bigquery.job.QueryJob \ + ]: + The job instance, constructed via the resource. """ config = resource.get("configuration", {}) if "load" in config: @@ -1145,10 +1145,12 @@ def get_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): (Optional) How to retry the RPC. 
Returns: - Union[google.cloud.bigquery.job.LoadJob, \ - google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob, \ - google.cloud.bigquery.job.QueryJob]: + Union[ \ + google.cloud.bigquery.job.LoadJob, \ + google.cloud.bigquery.job.CopyJob, \ + google.cloud.bigquery.job.ExtractJob, \ + google.cloud.bigquery.job.QueryJob \ + ]: Job instance, based on the resource returned by the API. """ extra_params = {"projection": "full"} @@ -1176,7 +1178,7 @@ def cancel_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/cancel - Arguments: + Args: job_id (str): Unique job identifier. Keyword Arguments: @@ -1188,10 +1190,12 @@ def cancel_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): (Optional) How to retry the RPC. Returns: - Union[google.cloud.bigquery.job.LoadJob, \ - google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob, \ - google.cloud.bigquery.job.QueryJob]: + Union[ \ + google.cloud.bigquery.job.LoadJob, \ + google.cloud.bigquery.job.CopyJob, \ + google.cloud.bigquery.job.ExtractJob, \ + google.cloud.bigquery.job.QueryJob, \ + ]: Job instance, based on the resource returned by the API. """ extra_params = {"projection": "full"} @@ -1216,6 +1220,7 @@ def cancel_job(self, job_id, project=None, location=None, retry=DEFAULT_RETRY): def list_jobs( self, project=None, + parent_job=None, max_results=None, page_token=None, all_users=None, @@ -1230,33 +1235,38 @@ def list_jobs( https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list Args: - project (str, optional): + project (Optional[str]): Project ID to use for retreiving datasets. Defaults to the client's project. - max_results (int, optional): + parent_job (Optional[Union[ \ + google.cloud.bigquery.job._AsyncJob, \ + str, \ + ]]): + If set, retrieve only child jobs of the specified parent. + max_results (Optional[int]): Maximum number of jobs to return. 
- page_token (str, optional): + page_token (Optional[str]): Opaque marker for the next "page" of jobs. If not passed, the API will return the first page of jobs. The token marks the beginning of the iterator to be returned and the value of the ``page_token`` can be accessed at ``next_page_token`` of :class:`~google.api_core.page_iterator.HTTPIterator`. - all_users (bool, optional): + all_users (Optional[bool]): If true, include jobs owned by all users in the project. Defaults to :data:`False`. - state_filter (str, optional): + state_filter (Optional[str]): If set, include only jobs matching the given state. One of: * ``"done"`` * ``"pending"`` * ``"running"`` - retry (google.api_core.retry.Retry, optional): + retry (Optional[google.api_core.retry.Retry]): How to retry the RPC. - min_creation_time (datetime.datetime, optional): + min_creation_time (Optional[datetime.datetime]): Min value for job creation time. If set, only jobs created after or at this timestamp are returned. If the datetime has no time zone assumes UTC time. - max_creation_time (datetime.datetime, optional): + max_creation_time (Optional[datetime.datetime]): Max value for job creation time. If set, only jobs created before or at this timestamp are returned. If the datetime has no time zone assumes UTC time. @@ -1265,6 +1275,9 @@ def list_jobs( google.api_core.page_iterator.Iterator: Iterable of job instances. """ + if isinstance(parent_job, job._AsyncJob): + parent_job = parent_job.job_id + extra_params = { "allUsers": all_users, "stateFilter": state_filter, @@ -1275,6 +1288,7 @@ def list_jobs( google.cloud._helpers._millis_from_datetime(max_creation_time) ), "projection": "full", + "parentJobId": parent_job, } extra_params = { @@ -1310,15 +1324,15 @@ def load_table_from_uri( """Starts a job for loading data into a table from CloudStorage. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationload Arguments: source_uris (Union[str, Sequence[str]]): URIs of data files to be loaded; in format ``gs:///``. destination (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): Table into which data is to be loaded. If a string is passed @@ -1345,6 +1359,11 @@ def load_table_from_uri( Returns: google.cloud.bigquery.job.LoadJob: A new load job. + + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) @@ -1360,6 +1379,10 @@ def load_table_from_uri( source_uris = [source_uris] destination = _table_arg_to_table_ref(destination, default_project=self.project) + + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) + load_job = job.LoadJob(job_ref, source_uris, destination, self, job_config) load_job._begin(retry=retry) @@ -1386,8 +1409,8 @@ def load_table_from_file( Arguments: file_obj (file): A file handle opened in binary mode for reading. destination (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): Table into which data is to be loaded. If a string is passed @@ -1426,6 +1449,10 @@ def load_table_from_file( If ``size`` is not passed in and can not be determined, or if the ``file_obj`` can be detected to be a file opened in text mode. + + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` + class. 
""" job_id = _make_job_id(job_id, job_id_prefix) @@ -1437,6 +1464,8 @@ def load_table_from_file( destination = _table_arg_to_table_ref(destination, default_project=self.project) job_ref = job._JobReference(job_id, project=project, location=location) + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) load_job = job.LoadJob(job_ref, None, destination, self, job_config) job_resource = load_job.to_api_repr() @@ -1491,19 +1520,19 @@ def load_table_from_dataframe( :func:`google.cloud.bigquery.table.TableReference.from_string`. Keyword Arguments: - num_retries (int, optional): Number of upload retries. - job_id (str, optional): Name of the job. - job_id_prefix (str, optional): + num_retries (Optional[int]): Number of upload retries. + job_id (Optional[str]): Name of the job. + job_id_prefix (Optional[str]): The user-provided prefix for a randomly generated job ID. This parameter will be ignored if a ``job_id`` is also given. location (str): Location where to run the job. Must match the location of the destination table. - project (str, optional): + project (Optional[str]): Project ID of the project of where to run the job. Defaults to the client's project. - job_config (~google.cloud.bigquery.job.LoadJobConfig, optional): + job_config (Optional[google.cloud.bigquery.job.LoadJobConfig]): Extra configuration options for the job. To override the default pandas data type conversions, supply @@ -1535,16 +1564,22 @@ def load_table_from_dataframe( If a usable parquet engine cannot be found. This method requires :mod:`pyarrow` or :mod:`fastparquet` to be installed. + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` + class. 
""" job_id = _make_job_id(job_id, job_id_prefix) - if job_config is None: - job_config = job.LoadJobConfig() - else: + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) # Make a copy so that the job config isn't modified in-place. job_config_properties = copy.deepcopy(job_config._properties) job_config = job.LoadJobConfig() job_config._properties = job_config_properties + + else: + job_config = job.LoadJobConfig() + job_config.source_format = job.SourceFormat.PARQUET if location is None: @@ -1640,13 +1675,29 @@ def load_table_from_json( ): """Upload the contents of a table from a JSON string or dict. - Arguments: + Args: json_rows (Iterable[Dict[str, Any]]): Row data to be inserted. Keys must match the table schema fields and values must be JSON-compatible representations. + + .. note:: + + If your data is already a newline-delimited JSON string, + it is best to wrap it into a file-like object and pass it + to :meth:`~google.cloud.bigquery.client.Client.load_table_from_file`:: + + import io + from google.cloud import bigquery + + data = u'{"foo": "bar"}' + data_as_file = io.StringIO(data) + + client = bigquery.Client() + client.load_table_from_file(data_as_file, ...) + destination (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): Table into which data is to be loaded. If a string is passed @@ -1655,7 +1706,7 @@ def load_table_from_json( :func:`google.cloud.bigquery.table.TableReference.from_string`. Keyword Arguments: - num_retries (int, optional): Number of upload retries. + num_retries (Optional[int]): Number of upload retries. job_id (str): (Optional) Name of the job. job_id_prefix (str): (Optional) the user-provided prefix for a randomly generated @@ -1674,14 +1725,21 @@ def load_table_from_json( Returns: google.cloud.bigquery.job.LoadJob: A new load job. 
+ + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.LoadJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) - if job_config is None: - job_config = job.LoadJobConfig() - else: + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.LoadJobConfig) # Make a copy so that the job config isn't modified in-place. job_config = copy.deepcopy(job_config) + else: + job_config = job.LoadJobConfig() + job_config.source_format = job.SourceFormat.NEWLINE_DELIMITED_JSON if job_config.schema is None: @@ -1712,19 +1770,19 @@ def load_table_from_json( def _do_resumable_upload(self, stream, metadata, num_retries): """Perform a resumable upload. - :type stream: IO[bytes] - :param stream: A bytes IO object open for reading. + Args: + stream (IO[bytes]): A bytes IO object open for reading. - :type metadata: dict - :param metadata: The metadata associated with the upload. + metadata (Dict): The metadata associated with the upload. - :type num_retries: int - :param num_retries: Number of upload retries. (Deprecated: This - argument will be removed in a future release.) + num_retries (int): + Number of upload retries. (Deprecated: This + argument will be removed in a future release.) - :rtype: :class:`~requests.Response` - :returns: The "200 OK" response object returned after the final chunk - is uploaded. + Returns: + requests.Response: + The "200 OK" response object returned after the final chunk + is uploaded. """ upload, transport = self._initiate_resumable_upload( stream, metadata, num_retries @@ -1738,23 +1796,22 @@ def _do_resumable_upload(self, stream, metadata, num_retries): def _initiate_resumable_upload(self, stream, metadata, num_retries): """Initiate a resumable upload. - :type stream: IO[bytes] - :param stream: A bytes IO object open for reading. + Args: + stream (IO[bytes]): A bytes IO object open for reading. 
- :type metadata: dict - :param metadata: The metadata associated with the upload. + metadata (Dict): The metadata associated with the upload. - :type num_retries: int - :param num_retries: Number of upload retries. (Deprecated: This - argument will be removed in a future release.) + num_retries (int): + Number of upload retries. (Deprecated: This + argument will be removed in a future release.) - :rtype: tuple - :returns: - Pair of + Returns: + Tuple: + Pair of - * The :class:`~google.resumable_media.requests.ResumableUpload` - that was created - * The ``transport`` used to initiate the upload. + * The :class:`~google.resumable_media.requests.ResumableUpload` + that was created + * The ``transport`` used to initiate the upload. """ chunk_size = _DEFAULT_CHUNKSIZE transport = self._http @@ -1778,26 +1835,29 @@ def _initiate_resumable_upload(self, stream, metadata, num_retries): def _do_multipart_upload(self, stream, metadata, size, num_retries): """Perform a multipart upload. - :type stream: IO[bytes] - :param stream: A bytes IO object open for reading. + Args: + stream (IO[bytes]): A bytes IO object open for reading. + + metadata (Dict): The metadata associated with the upload. - :type metadata: dict - :param metadata: The metadata associated with the upload. + size (int): + The number of bytes to be uploaded (which will be read + from ``stream``). If not provided, the upload will be + concluded once ``stream`` is exhausted (or :data:`None`). - :type size: int - :param size: The number of bytes to be uploaded (which will be read - from ``stream``). If not provided, the upload will be - concluded once ``stream`` is exhausted (or :data:`None`). + num_retries (int): + Number of upload retries. (Deprecated: This + argument will be removed in a future release.) - :type num_retries: int - :param num_retries: Number of upload retries. (Deprecated: This - argument will be removed in a future release.) 
+ Returns: + requests.Response: + The "200 OK" response object returned after the multipart + upload request. - :rtype: :class:`~requests.Response` - :returns: The "200 OK" response object returned after the multipart - upload request. - :raises: :exc:`ValueError` if the ``stream`` has fewer than ``size`` - bytes remaining. + Raises: + ValueError: + if the ``stream`` has fewer than ``size`` + bytes remaining. """ data = stream.read(size) if len(data) < size: @@ -1832,25 +1892,25 @@ def copy_table( """Copy one or more tables to another table. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationtablecopy - Arguments: + Args: sources (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ Sequence[ \ Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ] \ ], \ ]): Table or tables to be copied. - destination (Union[ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + destination (Union[ \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): Table into which data is to be copied. @@ -1874,6 +1934,11 @@ def copy_table( Returns: google.cloud.bigquery.job.CopyJob: A new copy job instance. + + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.CopyJobConfig` + class. 
""" job_id = _make_job_id(job_id, job_id_prefix) @@ -1902,6 +1967,8 @@ def copy_table( destination = _table_arg_to_table_ref(destination, default_project=self.project) + if job_config: + _verify_job_config_type(job_config, google.cloud.bigquery.job.CopyJobConfig) copy_job = job.CopyJob( job_ref, sources, destination, client=self, job_config=job_config ) @@ -1923,12 +1990,12 @@ def extract_table( """Start a job to extract a table into Cloud Storage files. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationextract - Arguments: + Args: source (Union[ \ - :class:`google.cloud.bigquery.table.Table`, \ - :class:`google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ src, \ ]): Table to be extracted. @@ -1953,12 +2020,16 @@ def extract_table( (Optional) Extra configuration options for the job. retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - :type source: :class:`google.cloud.bigquery.table.TableReference` - :param source: table to be extracted. - + Args: + source (google.cloud.bigquery.table.TableReference): table to be extracted. Returns: google.cloud.bigquery.job.ExtractJob: A new extract job instance. + + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.ExtractJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) @@ -1974,6 +2045,10 @@ def extract_table( if isinstance(destination_uris, six.string_types): destination_uris = [destination_uris] + if job_config: + _verify_job_config_type( + job_config, google.cloud.bigquery.job.ExtractJobConfig + ) extract_job = job.ExtractJob( job_ref, source, destination_uris, client=self, job_config=job_config ) @@ -1994,9 +2069,9 @@ def query( """Run a SQL query. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationquery - Arguments: + Args: query (str): SQL query to be executed. Defaults to the standard SQL dialect. Use the ``job_config`` parameter to change dialects. @@ -2023,6 +2098,11 @@ def query( Returns: google.cloud.bigquery.job.QueryJob: A new query job instance. + + Raises: + TypeError: + If ``job_config`` is not an instance of :class:`~google.cloud.bigquery.job.QueryJobConfig` + class. """ job_id = _make_job_id(job_id, job_id_prefix) @@ -2034,6 +2114,9 @@ def query( if self._default_query_job_config: if job_config: + _verify_job_config_type( + job_config, google.cloud.bigquery.job.QueryJobConfig + ) # anything that's not defined on the incoming # that is in the default, # should be filled in with the default @@ -2042,6 +2125,10 @@ def query( self._default_query_job_config ) else: + _verify_job_config_type( + self._default_query_job_config, + google.cloud.bigquery.job.QueryJobConfig, + ) job_config = self._default_query_job_config job_ref = job._JobReference(job_id, project=project, location=location) @@ -2058,27 +2145,22 @@ def insert_rows(self, table, rows, selected_fields=None, **kwargs): Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): The destination table for the row data, or a reference to it. - rows (Union[ \ - Sequence[Tuple], \ - Sequence[dict], \ - ]): + rows (Union[Sequence[Tuple], Sequence[dict]]): Row data to be inserted. If a list of tuples is given, each tuple should contain data for each schema field on the current table and in the same order as the schema fields. If a list of dictionaries is given, the keys must include all required fields in the schema. 
Keys which do not correspond to a field in the schema are ignored. - selected_fields (Sequence[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ]): + selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): The fields to return. Required if ``table`` is a :class:`~google.cloud.bigquery.table.TableReference`. - kwargs (dict): + kwargs (Dict): Keyword arguments to :meth:`~google.cloud.bigquery.client.Client.insert_rows_json`. @@ -2121,21 +2203,19 @@ def insert_rows_from_dataframe( Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): The destination table for the row data, or a reference to it. dataframe (pandas.DataFrame): A :class:`~pandas.DataFrame` containing the data to load. - selected_fields (Sequence[ \ - :class:`~google.cloud.bigquery.schema.SchemaField`, \ - ]): + selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): The fields to return. Required if ``table`` is a :class:`~google.cloud.bigquery.table.TableReference`. chunk_size (int): The number of rows to stream in a single chunk. Must be positive. - kwargs (dict): + kwargs (Dict): Keyword arguments to :meth:`~google.cloud.bigquery.client.Client.insert_rows_json`. @@ -2180,33 +2260,36 @@ def insert_rows_json( See https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/insertAll - table (Union[ \ - :class:`~google.cloud.bigquery.table.Table` \ - :class:`~google.cloud.bigquery.table.TableReference`, \ - str, \ - ]): - The destination table for the row data, or a reference to it. - json_rows (Sequence[dict]): - Row data to be inserted. Keys must match the table schema fields - and values must be JSON-compatible representations. - row_ids (Sequence[str]): - (Optional) Unique ids, one per row being inserted. If omitted, - unique IDs are created. 
- skip_invalid_rows (bool): - (Optional) Insert all valid rows of a request, even if invalid - rows exist. The default value is False, which causes the entire - request to fail if any invalid rows exist. - ignore_unknown_values (bool): - (Optional) Accept rows that contain values that do not match the - schema. The unknown values are ignored. Default is False, which - treats unknown values as errors. - template_suffix (str): - (Optional) treat ``name`` as a template table and provide a suffix. - BigQuery will create the table `` + `` based - on the schema of the template table. See - https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables - retry (:class:`google.api_core.retry.Retry`): - (Optional) How to retry the RPC. + Args: + table (Union[ \ + google.cloud.bigquery.table.Table \ + google.cloud.bigquery.table.TableReference, \ + str \ + ]): + The destination table for the row data, or a reference to it. + json_rows (Sequence[Dict]): + Row data to be inserted. Keys must match the table schema fields + and values must be JSON-compatible representations. + row_ids (Optional[Sequence[Optional[str]]]): + Unique IDs, one per row being inserted. An ID can also be + ``None``, indicating that an explicit insert ID should **not** + be used for that row. If the argument is omitted altogether, + unique IDs are created automatically. + skip_invalid_rows (Optional[bool]): + Insert all valid rows of a request, even if invalid rows exist. + The default value is ``False``, which causes the entire request + to fail if any invalid rows exist. + ignore_unknown_values (Optional[bool]): + Accept rows that contain values that do not match the schema. + The unknown values are ignored. Default is ``False``, which + treats unknown values as errors. + template_suffix (Optional[str]): + Treat ``name`` as a template table and provide a suffix. + BigQuery will create the table `` + `` + based on the schema of the template table. 
See + https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables + retry (Optional[google.api_core.retry.Retry]): + How to retry the RPC. Returns: Sequence[Mappings]: @@ -2252,10 +2335,10 @@ def insert_rows_json( def list_partitions(self, table, retry=DEFAULT_RETRY): """List the partitions in a table. - Arguments: + Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): The table or reference from which to get partition info @@ -2304,18 +2387,16 @@ def list_rows( Args: table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableListItem`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableListItem, \ + google.cloud.bigquery.table.TableReference, \ str, \ ]): The table to list, or a reference to it. When the table object does not contain a schema and ``selected_fields`` is not supplied, this method calls ``get_table`` to fetch the table schema. - selected_fields (Sequence[ \ - :class:`~google.cloud.bigquery.schema.SchemaField` \ - ]): + selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): The fields to return. If not supplied, data for all columns are downloaded. max_results (int): @@ -2333,7 +2414,7 @@ def list_rows( Optional. The maximum number of rows in each page of results from this request. Non-positive values are ignored. Defaults to a sensible value set by the API. - retry (:class:`google.api_core.retry.Retry`): + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. Returns: @@ -2433,14 +2514,13 @@ def schema_to_json(self, schema_list, destination): def _item_to_project(iterator, resource): """Convert a JSON project to the native object. 
- :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - :type resource: dict - :param resource: An item to be converted to a project. + resource (Dict): An item to be converted to a project. - :rtype: :class:`.Project` - :returns: The next project in the page. + Returns: + google.cloud.bigquery.client.Project: The next project in the page. """ return Project.from_api_repr(resource) @@ -2451,14 +2531,13 @@ def _item_to_project(iterator, resource): def _item_to_dataset(iterator, resource): """Convert a JSON dataset to the native object. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - :type resource: dict - :param resource: An item to be converted to a dataset. + resource (Dict): An item to be converted to a dataset. - :rtype: :class:`.DatasetListItem` - :returns: The next dataset in the page. + Returns: + google.cloud.bigquery.dataset.DatasetListItem: The next dataset in the page. """ return DatasetListItem(resource) @@ -2466,14 +2545,13 @@ def _item_to_dataset(iterator, resource): def _item_to_job(iterator, resource): """Convert a JSON job to the native object. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - :type resource: dict - :param resource: An item to be converted to a job. + resource (Dict): An item to be converted to a job. - :rtype: job instance. - :returns: The next job in the page. + Returns: + job instance: The next job in the page. 
""" return iterator.client.job_from_resource(resource) @@ -2484,8 +2562,7 @@ def _item_to_model(iterator, resource): Args: iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - resource (dict): - An item to be converted to a model. + resource (Dict): An item to be converted to a model. Returns: google.cloud.bigquery.model.Model: The next model in the page. @@ -2499,8 +2576,7 @@ def _item_to_routine(iterator, resource): Args: iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - resource (dict): - An item to be converted to a routine. + resource (Dict): An item to be converted to a routine. Returns: google.cloud.bigquery.routine.Routine: The next routine in the page. @@ -2511,14 +2587,13 @@ def _item_to_routine(iterator, resource): def _item_to_table(iterator, resource): """Convert a JSON table to the native object. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. - :type resource: dict - :param resource: An item to be converted to a table. + resource (Dict): An item to be converted to a table. - :rtype: :class:`~google.cloud.bigquery.table.Table` - :returns: The next table in the page. + Returns: + google.cloud.bigquery.table.Table: The next table in the page. """ return TableListItem(resource) @@ -2526,14 +2601,13 @@ def _item_to_table(iterator, resource): def _make_job_id(job_id, prefix=None): """Construct an ID for a new job. - :type job_id: str or ``NoneType`` - :param job_id: the user-provided job ID + Args: + job_id (Optional[str]): the user-provided job ID. - :type prefix: str or ``NoneType`` - :param prefix: (Optional) the user-provided prefix for a job ID + prefix (Optional[str]): the user-provided prefix for a job ID. 
- :rtype: str - :returns: A job ID + Returns: + str: A job ID """ if job_id is not None: return job_id @@ -2546,11 +2620,13 @@ def _make_job_id(job_id, prefix=None): def _check_mode(stream): """Check that a stream was opened in read-binary mode. - :type stream: IO[bytes] - :param stream: A bytes IO object open for reading. + Args: + stream (IO[bytes]): A bytes IO object open for reading. - :raises: :exc:`ValueError` if the ``stream.mode`` is a valid attribute - and is not among ``rb``, ``r+b`` or ``rb+``. + Raises: + ValueError: + if the ``stream.mode`` is a valid attribute + and is not among ``rb``, ``r+b`` or ``rb+``. """ mode = getattr(stream, "mode", None) @@ -2571,11 +2647,11 @@ def _check_mode(stream): def _get_upload_headers(user_agent): """Get the headers for an upload request. - :type user_agent: str - :param user_agent: The user-agent for requests. + Args: + user_agent (str): The user-agent for requests. - :rtype: dict - :returns: The headers to be used for the request. + Returns: + Dict: The headers to be used for the request. """ return { "Accept": "application/json", diff --git a/bigquery/google/cloud/bigquery/dataset.py b/bigquery/google/cloud/bigquery/dataset.py index 67a7353f94e7..754a2fa00d00 100644 --- a/bigquery/google/cloud/bigquery/dataset.py +++ b/bigquery/google/cloud/bigquery/dataset.py @@ -24,6 +24,7 @@ from google.cloud.bigquery.model import ModelReference from google.cloud.bigquery.routine import RoutineReference from google.cloud.bigquery.table import TableReference +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration def _get_table_reference(self, table_id): @@ -207,7 +208,7 @@ class DatasetReference(object): """DatasetReferences are pointers to datasets. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets + https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#datasetreference Args: project (str): The ID of the project @@ -346,13 +347,10 @@ class Dataset(object): """Datasets are containers for tables. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets + https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets#resource-dataset Args: - dataset_ref (Union[ \ - :class:`~google.cloud.bigquery.dataset.DatasetReference`, \ - str, \ - ]): + dataset_ref (Union[google.cloud.bigquery.dataset.DatasetReference, str]): A pointer to a dataset. If ``dataset_ref`` is a string, it must include both the project ID and the dataset ID, separated by ``.``. @@ -361,8 +359,10 @@ class Dataset(object): _PROPERTY_TO_API_FIELD = { "access_entries": "access", "created": "creationTime", + "default_partition_expiration_ms": "defaultPartitionExpirationMs", "default_table_expiration_ms": "defaultTableExpirationMs", "friendly_name": "friendlyName", + "default_encryption_configuration": "defaultEncryptionConfiguration", } def __init__(self, dataset_ref): @@ -463,6 +463,34 @@ def self_link(self): """ return self._properties.get("selfLink") + @property + def default_partition_expiration_ms(self): + """Optional[int]: The default partition expiration for all + partitioned tables in the dataset, in milliseconds. + + Once this property is set, all newly-created partitioned tables in + the dataset will have a ``time_partitioning.expiration_ms`` property + set to this value, and changing the value will only affect new + tables, not existing ones. The storage in a partition will have an + expiration time of its partition time plus this value. + + Setting this property overrides the use of + ``default_table_expiration_ms`` for partitioned tables: only one of + ``default_table_expiration_ms`` and + ``default_partition_expiration_ms`` will be used for any new + partitioned table.
If you provide an explicit + ``time_partitioning.expiration_ms`` when creating or updating a + partitioned table, that value takes precedence over the default + partition expiration time indicated by this property. + """ + return _helpers._int_or_none( + self._properties.get("defaultPartitionExpirationMs") + ) + + @default_partition_expiration_ms.setter + def default_partition_expiration_ms(self, value): + self._properties["defaultPartitionExpirationMs"] = _helpers._str_or_none(value) + @property def default_table_expiration_ms(self): """Union[int, None]: Default expiration time for tables in the dataset @@ -547,6 +575,30 @@ def labels(self, value): raise ValueError("Pass a dict") self._properties["labels"] = value + @property + def default_encryption_configuration(self): + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom + encryption configuration for all tables in the dataset. + + Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` + if using default encryption. + + See `protecting data with Cloud KMS keys + `_ + in the BigQuery documentation. + """ + prop = self._properties.get("defaultEncryptionConfiguration") + if prop: + prop = EncryptionConfiguration.from_api_repr(prop) + return prop + + @default_encryption_configuration.setter + def default_encryption_configuration(self, value): + api_repr = value + if value: + api_repr = value.to_api_repr() + self._properties["defaultEncryptionConfiguration"] = api_repr + @classmethod def from_string(cls, full_dataset_id): """Construct a dataset from fully-qualified dataset ID. diff --git a/bigquery/google/cloud/bigquery/dbapi/_helpers.py b/bigquery/google/cloud/bigquery/dbapi/_helpers.py index e5f4cff51666..651880feac90 100644 --- a/bigquery/google/cloud/bigquery/dbapi/_helpers.py +++ b/bigquery/google/cloud/bigquery/dbapi/_helpers.py @@ -30,18 +30,21 @@ def scalar_to_query_parameter(value, name=None): """Convert a scalar value into a query parameter. 
- :type value: any - :param value: A scalar value to convert into a query parameter. + Args: + value (Any): + A scalar value to convert into a query parameter. - :type name: str - :param name: (Optional) Name of the query parameter. + name (str): + (Optional) Name of the query parameter. - :rtype: :class:`~google.cloud.bigquery.ScalarQueryParameter` - :returns: - A query parameter corresponding with the type and value of the plain - Python object. - :raises: :class:`~google.cloud.bigquery.dbapi.exceptions.ProgrammingError` - if the type cannot be determined. + Returns: + google.cloud.bigquery.ScalarQueryParameter: + A query parameter corresponding with the type and value of the plain + Python object. + + Raises: + google.cloud.bigquery.dbapi.exceptions.ProgrammingError: + if the type cannot be determined. """ parameter_type = bigquery_scalar_type(value) @@ -67,8 +70,8 @@ def array_to_query_parameter(value, name=None): Python object. Raises: - :class:`~google.cloud.bigquery.dbapi.exceptions.ProgrammingError` - if the type of array elements cannot be determined. + google.cloud.bigquery.dbapi.exceptions.ProgrammingError: + if the type of array elements cannot be determined. """ if not array_like(value): raise exceptions.ProgrammingError( @@ -97,11 +100,12 @@ def array_to_query_parameter(value, name=None): def to_query_parameters_list(parameters): """Converts a sequence of parameter values into query parameters. - :type parameters: Sequence[Any] - :param parameters: Sequence of query parameter values. + Args: + parameters (Sequence[Any]): Sequence of query parameter values. - :rtype: List[google.cloud.bigquery.query._AbstractQueryParameter] - :returns: A list of query parameters. + Returns: + List[google.cloud.bigquery.query._AbstractQueryParameter]: + A list of query parameters. """ result = [] @@ -120,11 +124,12 @@ def to_query_parameters_list(parameters): def to_query_parameters_dict(parameters): """Converts a dictionary of parameter values into query parameters. 
- :type parameters: Mapping[str, Any] - :param parameters: Dictionary of query parameter values. + Args: + parameters (Mapping[str, Any]): Dictionary of query parameter values. - :rtype: List[google.cloud.bigquery.query._AbstractQueryParameter] - :returns: A list of named query parameters. + Returns: + List[google.cloud.bigquery.query._AbstractQueryParameter]: + A list of named query parameters. """ result = [] @@ -146,11 +151,13 @@ def to_query_parameters_dict(parameters): def to_query_parameters(parameters): """Converts DB-API parameter values into query parameters. - :type parameters: Mapping[str, Any] or Sequence[Any] - :param parameters: A dictionary or sequence of query parameter values. + Args: + parameters (Union[Mapping[str, Any], Sequence[Any]]): + A dictionary or sequence of query parameter values. - :rtype: List[google.cloud.bigquery.query._AbstractQueryParameter] - :returns: A list of query parameters. + Returns: + List[google.cloud.bigquery.query._AbstractQueryParameter]: + A list of query parameters. """ if parameters is None: return [] diff --git a/bigquery/google/cloud/bigquery/dbapi/connection.py b/bigquery/google/cloud/bigquery/dbapi/connection.py index 0dbc9143b255..ee7d0dc3cc59 100644 --- a/bigquery/google/cloud/bigquery/dbapi/connection.py +++ b/bigquery/google/cloud/bigquery/dbapi/connection.py @@ -21,8 +21,8 @@ class Connection(object): """DB-API Connection to Google BigQuery. - :type client: :class:`~google.cloud.bigquery.Client` - :param client: A client used to connect to BigQuery. + Args: + client (google.cloud.bigquery.Client): A client used to connect to BigQuery. """ def __init__(self, client): @@ -37,8 +37,8 @@ def commit(self): def cursor(self): """Return a new cursor object. - :rtype: :class:`~google.cloud.bigquery.dbapi.Cursor` - :returns: A DB-API cursor that uses this connection. + Returns: + google.cloud.bigquery.dbapi.Cursor: A DB-API cursor that uses this connection. 
""" return cursor.Cursor(self) @@ -46,13 +46,13 @@ def cursor(self): def connect(client=None): """Construct a DB-API connection to Google BigQuery. - :type client: :class:`~google.cloud.bigquery.Client` - :param client: - (Optional) A client used to connect to BigQuery. If not passed, a - client is created using default options inferred from the environment. + Args: + client (google.cloud.bigquery.Client): + (Optional) A client used to connect to BigQuery. If not passed, a + client is created using default options inferred from the environment. - :rtype: :class:`~google.cloud.bigquery.dbapi.Connection` - :returns: A new DB-API connection to BigQuery. + Returns: + google.cloud.bigquery.dbapi.Connection: A new DB-API connection to BigQuery. """ if client is None: client = bigquery.Client() diff --git a/bigquery/google/cloud/bigquery/dbapi/cursor.py b/bigquery/google/cloud/bigquery/dbapi/cursor.py index 9b7a895b367f..a3e6ea5be87e 100644 --- a/bigquery/google/cloud/bigquery/dbapi/cursor.py +++ b/bigquery/google/cloud/bigquery/dbapi/cursor.py @@ -49,8 +49,9 @@ class Cursor(object): """DB-API Cursor to Google BigQuery. - :type connection: :class:`~google.cloud.bigquery.dbapi.Connection` - :param connection: A DB-API connection to Google BigQuery. + Args: + connection (google.cloud.bigquery.dbapi.Connection): + A DB-API connection to Google BigQuery. """ def __init__(self, connection): @@ -74,8 +75,9 @@ def close(self): def _set_description(self, schema): """Set description from schema. - :type schema: Sequence[google.cloud.bigquery.schema.SchemaField] - :param schema: A description of fields in the schema. + Args: + schema (Sequence[google.cloud.bigquery.schema.SchemaField]): + A description of fields in the schema. """ if schema is None: self.description = None @@ -103,9 +105,9 @@ def _set_rowcount(self, query_results): query, but if it was a DML statement, it sets rowcount to the number of modified rows. 
- :type query_results: - :class:`~google.cloud.bigquery.query._QueryResults` - :param query_results: results of a query + Args: + query_results (google.cloud.bigquery.query._QueryResults): + Results of a query. """ total_rows = 0 num_dml_affected_rows = query_results.num_dml_affected_rows @@ -138,19 +140,18 @@ def execute(self, operation, parameters=None, job_id=None, job_config=None): yet supported. See: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/3524 - :type operation: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. - :type parameters: Mapping[str, Any] or Sequence[Any] - :param parameters: - (Optional) dictionary or sequence of parameter values. + parameters (Union[Mapping[str, Any], Sequence[Any]]): + (Optional) dictionary or sequence of parameter values. - :type job_id: str - :param job_id: (Optional) The job_id to use. If not set, a job ID - is generated at random. + job_id (str): + (Optional) The job_id to use. If not set, a job ID + is generated at random. - :type job_config: :class:`~google.cloud.bigquery.job.QueryJobConfig` - :param job_config: (Optional) Extra configuration options for the query job. + job_config (google.cloud.bigquery.job.QueryJobConfig): + (Optional) Extra configuration options for the query job. """ self._query_data = None self._query_job = None @@ -182,11 +183,11 @@ def execute(self, operation, parameters=None, job_id=None, job_config=None): def executemany(self, operation, seq_of_parameters): """Prepare and execute a database operation multiple times. - :type operation: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. - :type seq_of_parameters: Sequence[Mapping[str, Any] or Sequence[Any]] - :param parameters: Sequence of many sets of parameter values. + seq_of_parameters (Sequence[Union[Mapping[str, Any], Sequence[Any]]]): + Sequence of many sets of parameter values.
""" for parameters in seq_of_parameters: self.execute(operation, parameters) @@ -221,12 +222,13 @@ def _try_fetch(self, size=None): def fetchone(self): """Fetch a single row from the results of the last ``execute*()`` call. - :rtype: tuple - :returns: - A tuple representing a row or ``None`` if no more data is - available. - :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError` - if called before ``execute()``. + Returns: + Tuple: + A tuple representing a row or ``None`` if no more data is + available. + + Raises: + google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. """ self._try_fetch() try: @@ -242,16 +244,17 @@ def fetchmany(self, size=None): Set the ``arraysize`` attribute before calling ``execute()`` to set the batch size. - :type size: int - :param size: - (Optional) Maximum number of rows to return. Defaults to the - ``arraysize`` property value. If ``arraysize`` is not set, it - defaults to ``1``. + Args: + size (int): + (Optional) Maximum number of rows to return. Defaults to the + ``arraysize`` property value. If ``arraysize`` is not set, it + defaults to ``1``. + + Returns: + List[Tuple]: A list of rows. - :rtype: List[tuple] - :returns: A list of rows. - :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError` - if called before ``execute()``. + Raises: + google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. """ if size is None: # Since self.arraysize can be None (a deviation from PEP 249), @@ -272,10 +275,11 @@ def fetchmany(self, size=None): def fetchall(self): """Fetch all remaining results from the last ``execute*()`` call. - :rtype: List[tuple] - :returns: A list of all the rows in the results. - :raises: :class:`~google.cloud.bigquery.dbapi.InterfaceError` - if called before ``execute()``. + Returns: + List[Tuple]: A list of all the rows in the results. + + Raises: + google.cloud.bigquery.dbapi.InterfaceError: if called before ``execute()``. 
""" self._try_fetch() return list(self._query_data) @@ -293,17 +297,18 @@ def _format_operation_list(operation, parameters): The input operation will be a query like ``SELECT %s`` and the output will be a query like ``SELECT ?``. - :type operation: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. - :type parameters: Sequence[Any] - :param parameters: Sequence of parameter values. + parameters (Sequence[Any]): Sequence of parameter values. - :rtype: str - :returns: A formatted query string. - :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError` - if a parameter used in the operation is not found in the - ``parameters`` argument. + Returns: + str: A formatted query string. + + Raises: + google.cloud.bigquery.dbapi.ProgrammingError: + if a parameter used in the operation is not found in the + ``parameters`` argument. """ formatted_params = ["?" for _ in parameters] @@ -319,17 +324,18 @@ def _format_operation_dict(operation, parameters): The input operation will be a query like ``SELECT %(namedparam)s`` and the output will be a query like ``SELECT @namedparam``. - :type operation: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. + + parameters (Mapping[str, Any]): Dictionary of parameter values. - :type parameters: Mapping[str, Any] - :param parameters: Dictionary of parameter values. + Returns: + str: A formatted query string. - :rtype: str - :returns: A formatted query string. - :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError` - if a parameter used in the operation is not found in the - ``parameters`` argument. + Raises: + google.cloud.bigquery.dbapi.ProgrammingError: + if a parameter used in the operation is not found in the + ``parameters`` argument. 
""" formatted_params = {} for name in parameters: @@ -345,17 +351,19 @@ def _format_operation_dict(operation, parameters): def _format_operation(operation, parameters=None): """Formats parameters in operation in way BigQuery expects. - :type: str - :param operation: A Google BigQuery query string. + Args: + operation (str): A Google BigQuery query string. + + parameters (Union[Mapping[str, Any], Sequence[Any]]): + Optional parameter values. - :type: Mapping[str, Any] or Sequence[Any] - :param parameters: Optional parameter values. + Returns: + str: A formatted query string. - :rtype: str - :returns: A formatted query string. - :raises: :class:`~google.cloud.bigquery.dbapi.ProgrammingError` - if a parameter used in the operation is not found in the - ``parameters`` argument. + Raises: + google.cloud.bigquery.dbapi.ProgrammingError: + if a parameter used in the operation is not found in the + ``parameters`` argument. """ if parameters is None: return operation diff --git a/bigquery/google/cloud/bigquery/dbapi/types.py b/bigquery/google/cloud/bigquery/dbapi/types.py index 3c8c454a011a..14917820cd38 100644 --- a/bigquery/google/cloud/bigquery/dbapi/types.py +++ b/bigquery/google/cloud/bigquery/dbapi/types.py @@ -33,11 +33,11 @@ def Binary(string): """Contruct a DB-API binary value. - :type string: str - :param string: A string to encode as a binary value. + Args: + string (str): A string to encode as a binary value. - :rtype: bytes - :returns: The UTF-8 encoded bytes representing the string. + Returns: + bytes: The UTF-8 encoded bytes representing the string. """ return string.encode("utf-8") @@ -45,16 +45,15 @@ def Binary(string): def TimeFromTicks(ticks, tz=None): """Construct a DB-API time value from the given ticks value. - :type ticks: float - :param ticks: - a number of seconds since the epoch; see the documentation of the - standard Python time module for details. 
+ Args: + ticks (float): + a number of seconds since the epoch; see the documentation of the + standard Python time module for details. - :type tz: :class:`datetime.tzinfo` - :param tz: (Optional) time zone to use for conversion + tz (datetime.tzinfo): (Optional) time zone to use for conversion - :rtype: :class:`datetime.time` - :returns: time represented by ticks. + Returns: + datetime.time: time represented by ticks. """ dt = datetime.datetime.fromtimestamp(ticks, tz=tz) return dt.timetz() diff --git a/bigquery/google/cloud/bigquery/encryption_configuration.py b/bigquery/google/cloud/bigquery/encryption_configuration.py new file mode 100644 index 000000000000..ba04ae2c45a7 --- /dev/null +++ b/bigquery/google/cloud/bigquery/encryption_configuration.py @@ -0,0 +1,84 @@ +# Copyright 2015 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define class for the custom encryption configuration.""" + +import copy + + +class EncryptionConfiguration(object): + """Custom encryption configuration (e.g., Cloud KMS keys). + + Args: + kms_key_name (str): resource ID of Cloud KMS key used for encryption + """ + + def __init__(self, kms_key_name=None): + self._properties = {} + if kms_key_name is not None: + self._properties["kmsKeyName"] = kms_key_name + + @property + def kms_key_name(self): + """str: Resource ID of Cloud KMS key + + Resource ID of Cloud KMS key or :data:`None` if using default + encryption. 
+ """ + return self._properties.get("kmsKeyName") + + @kms_key_name.setter + def kms_key_name(self, value): + self._properties["kmsKeyName"] = value + + @classmethod + def from_api_repr(cls, resource): + """Construct an encryption configuration from its API representation + + Args: + resource (Dict[str, object]): + An encryption configuration representation as returned from + the API. + + Returns: + google.cloud.bigquery.table.EncryptionConfiguration: + An encryption configuration parsed from ``resource``. + """ + config = cls() + config._properties = copy.deepcopy(resource) + return config + + def to_api_repr(self): + """Construct the API resource representation of this encryption + configuration. + + Returns: + Dict[str, object]: + Encryption configuration as represented as an API resource + """ + return copy.deepcopy(self._properties) + + def __eq__(self, other): + if not isinstance(other, EncryptionConfiguration): + return NotImplemented + return self.kms_key_name == other.kms_key_name + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.kms_key_name) + + def __repr__(self): + return "EncryptionConfiguration({})".format(self.kms_key_name) diff --git a/bigquery/google/cloud/bigquery/external_config.py b/bigquery/google/cloud/bigquery/external_config.py index 048c2178a654..c637d37d185c 100644 --- a/bigquery/google/cloud/bigquery/external_config.py +++ b/bigquery/google/cloud/bigquery/external_config.py @@ -26,6 +26,7 @@ from google.cloud.bigquery._helpers import _to_bytes from google.cloud.bigquery._helpers import _bytes_to_json from google.cloud.bigquery._helpers import _int_or_none +from google.cloud.bigquery._helpers import _str_or_none from google.cloud.bigquery.schema import SchemaField @@ -175,8 +176,7 @@ def from_api_repr(cls, resource): API. Returns: - :class:`~.external_config.BigtableColumn`: - Configuration parsed from ``resource``. + external_config.BigtableColumn: Configuration parsed from ``resource``. 
""" config = cls() config._properties = copy.deepcopy(resource) @@ -248,7 +248,7 @@ def type_(self, value): @property def columns(self): - """List[:class:`~.external_config.BigtableColumn`]: Lists of columns + """List[BigtableColumn]: Lists of columns that should be exposed as individual fields. See @@ -368,8 +368,7 @@ def from_api_repr(cls, resource): API. Returns: - :class:`~.external_config.BigtableOptions`: - Configuration parsed from ``resource``. + BigtableOptions: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -475,8 +474,7 @@ def to_api_repr(self): """Build an API representation of this object. Returns: - Dict[str, Any]: - A dictionary in the format used by the BigQuery API. + Dict[str, Any]: A dictionary in the format used by the BigQuery API. """ return copy.deepcopy(self._properties) @@ -492,8 +490,7 @@ def from_api_repr(cls, resource): API. Returns: - :class:`~.external_config.CSVOptions`: - Configuration parsed from ``resource``. + CSVOptions: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -524,12 +521,24 @@ def skip_leading_rows(self): def skip_leading_rows(self, value): self._properties["skipLeadingRows"] = str(value) + @property + def range(self): + """str: The range of a sheet that BigQuery will query from. + + See + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#GoogleSheetsOptions + """ + return _str_or_none(self._properties.get("range")) + + @range.setter + def range(self, value): + self._properties["range"] = value + def to_api_repr(self): """Build an API representation of this object. Returns: - Dict[str, Any]: - A dictionary in the format used by the BigQuery API. + Dict[str, Any]: A dictionary in the format used by the BigQuery API. """ return copy.deepcopy(self._properties) @@ -545,8 +554,7 @@ def from_api_repr(cls, resource): API. 
Returns: - :class:`~.external_config.GoogleSheetsOptions`: - Configuration parsed from ``resource``. + GoogleSheetsOptions: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -560,7 +568,7 @@ class ExternalConfig(object): """Description of an external data source. Args: - source_format (:class:`~.external_config.ExternalSourceFormat`): + source_format (ExternalSourceFormat): See :attr:`source_format`. """ @@ -705,8 +713,7 @@ def from_api_repr(cls, resource): API. Returns: - :class:`~.external_config.ExternalConfig`: - Configuration parsed from ``resource``. + ExternalConfig: Configuration parsed from ``resource``. """ config = cls(resource["sourceFormat"]) for optcls in _OPTION_CLASSES: diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index ccbab8b5eb44..a8d797f4bef5 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -27,7 +27,9 @@ from google.cloud.bigquery.dataset import Dataset from google.cloud.bigquery.dataset import DatasetListItem from google.cloud.bigquery.dataset import DatasetReference +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration from google.cloud.bigquery.external_config import ExternalConfig +from google.cloud.bigquery import _helpers from google.cloud.bigquery.query import _query_param_from_api_repr from google.cloud.bigquery.query import ArrayQueryParameter from google.cloud.bigquery.query import ScalarQueryParameter @@ -36,13 +38,13 @@ from google.cloud.bigquery.retry import DEFAULT_RETRY from google.cloud.bigquery.routine import RoutineReference from google.cloud.bigquery.schema import SchemaField +from google.cloud.bigquery.schema import _to_schema_fields from google.cloud.bigquery.table import _EmptyRowIterator -from google.cloud.bigquery.table import EncryptionConfiguration +from google.cloud.bigquery.table import RangePartitioning from google.cloud.bigquery.table 
import _table_arg_to_table_ref from google.cloud.bigquery.table import TableReference from google.cloud.bigquery.table import Table from google.cloud.bigquery.table import TimePartitioning -from google.cloud.bigquery import _helpers _DONE_STATE = "DONE" _STOPPED_REASON = "stopped" @@ -80,11 +82,11 @@ def _error_result_to_exception(error_result): .. _troubleshooting errors: https://cloud.google.com/bigquery\ /troubleshooting-errors - :type error_result: Mapping[str, str] - :param error_result: The error result from BigQuery. + Args: + error_result (Mapping[str, str]): The error result from BigQuery. - :rtype google.cloud.exceptions.GoogleCloudError: - :returns: The mapped exception. + Returns: + google.cloud.exceptions.GoogleCloudError: The mapped exception. """ reason = error_result.get("reason") status_code = _ERROR_REASON_TO_EXCEPTION.get( @@ -332,12 +334,46 @@ def job_id(self): """str: ID of the job.""" return _helpers._get_sub_prop(self._properties, ["jobReference", "jobId"]) + @property + def parent_job_id(self): + """Return the ID of the parent job. + + See: + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics.FIELDS.parent_job_id + + Returns: + Optional[str]: parent job id. + """ + return _helpers._get_sub_prop(self._properties, ["statistics", "parentJobId"]) + + @property + def script_statistics(self): + resource = _helpers._get_sub_prop( + self._properties, ["statistics", "scriptStatistics"] + ) + if resource is None: + return None + return ScriptStatistics(resource) + + @property + def num_child_jobs(self): + """The number of child jobs executed. + + See: + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics.FIELDS.num_child_jobs + + Returns: + int + """ + count = _helpers._get_sub_prop(self._properties, ["statistics", "numChildJobs"]) + return int(count) if count is not None else 0 + @property def project(self): """Project bound to the job. - :rtype: str - :returns: the project (derived from the client). 
+ Returns: + str: the project (derived from the client). """ return _helpers._get_sub_prop(self._properties, ["jobReference", "projectId"]) @@ -349,13 +385,14 @@ def location(self): def _require_client(self, client): """Check client or verify over-ride. - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. + Args: + client (Optional[google.cloud.bigquery.client.Client]): + the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. - :rtype: :class:`google.cloud.bigquery.client.Client` - :returns: The client passed in or the currently bound client. + Returns: + google.cloud.bigquery.client.Client: + The client passed in or the currently bound client. """ if client is None: client = self._client @@ -363,10 +400,10 @@ def _require_client(self, client): @property def job_type(self): - """Type of job + """Type of job. - :rtype: str - :returns: one of 'load', 'copy', 'extract', 'query' + Returns: + str: one of 'load', 'copy', 'extract', 'query'. """ return self._JOB_TYPE @@ -374,8 +411,8 @@ def job_type(self): def path(self): """URL path for the job's APIs. - :rtype: str - :returns: the path based on project and job ID. + Returns: + str: the path based on project and job ID. """ return "/projects/%s/jobs/%s" % (self.project, self.job_id) @@ -388,8 +425,8 @@ def labels(self): def etag(self): """ETag for the job resource. - :rtype: str, or ``NoneType`` - :returns: the ETag (None until set from the server). + Returns: + Optional[str]: the ETag (None until set from the server). """ return self._properties.get("etag") @@ -397,8 +434,8 @@ def etag(self): def self_link(self): """URL for the job resource. - :rtype: str, or ``NoneType`` - :returns: the URL (None until set from the server). + Returns: + Optional[str]: the URL (None until set from the server). 
""" return self._properties.get("selfLink") @@ -406,8 +443,8 @@ def self_link(self): def user_email(self): """E-mail address of user who submitted the job. - :rtype: str, or ``NoneType`` - :returns: the URL (None until set from the server). + Returns: + Optional[str]: the URL (None until set from the server). """ return self._properties.get("user_email") @@ -415,8 +452,9 @@ def user_email(self): def created(self): """Datetime at which the job was created. - :rtype: ``datetime.datetime``, or ``NoneType`` - :returns: the creation time (None until set from the server). + Returns: + Optional[datetime.datetime]: + the creation time (None until set from the server). """ statistics = self._properties.get("statistics") if statistics is not None: @@ -428,8 +466,9 @@ def created(self): def started(self): """Datetime at which the job was started. - :rtype: ``datetime.datetime``, or ``NoneType`` - :returns: the start time (None until set from the server). + Returns: + Optional[datetime.datetime]: + the start time (None until set from the server). """ statistics = self._properties.get("statistics") if statistics is not None: @@ -441,8 +480,9 @@ def started(self): def ended(self): """Datetime at which the job finished. - :rtype: ``datetime.datetime``, or ``NoneType`` - :returns: the end time (None until set from the server). + Returns: + Optional[datetime.datetime]: + the end time (None until set from the server). """ statistics = self._properties.get("statistics") if statistics is not None: @@ -459,8 +499,8 @@ def _job_statistics(self): def error_result(self): """Error information about the job as a whole. - :rtype: mapping, or ``NoneType`` - :returns: the error information (None until set from the server). + Returns: + Optional[Mapping]: the error information (None until set from the server). 
""" status = self._properties.get("status") if status is not None: @@ -470,8 +510,9 @@ def error_result(self): def errors(self): """Information about individual errors generated by the job. - :rtype: list of mappings, or ``NoneType`` - :returns: the error information (None until set from the server). + Returns: + Optional[List[Mapping]]: + the error information (None until set from the server). """ status = self._properties.get("status") if status is not None: @@ -481,8 +522,9 @@ def errors(self): def state(self): """Status of the job. - :rtype: str, or ``NoneType`` - :returns: the state (None until set from the server). + Returns: + Optional[str]: + the state (None until set from the server). """ status = self._properties.get("status") if status is not None: @@ -499,8 +541,8 @@ def _copy_configuration_properties(self, configuration): def _set_properties(self, api_response): """Update properties from resource in body of ``api_response`` - :type api_response: dict - :param api_response: response returned from an API call + Args: + api_response (Dict): response returned from an API call. """ cleaned = api_response.copy() self._scrub_local_properties(cleaned) @@ -524,14 +566,18 @@ def _set_properties(self, api_response): def _get_resource_config(cls, resource): """Helper for :meth:`from_api_repr` - :type resource: dict - :param resource: resource for the job + Args: + resource (Dict): resource for the job. + + Returns: + (str, Dict): + tuple (string, dict), where the first element is the + job ID and the second contains job-specific configuration. - :rtype: dict - :returns: tuple (string, dict), where the first element is the - job ID and the second contains job-specific configuration. - :raises: :class:`KeyError` if the resource has no identifier, or - is missing the appropriate configuration. + Raises: + KeyError: + If the resource has no identifier, or + is missing the appropriate configuration. 
""" if "jobReference" not in resource or "jobId" not in resource["jobReference"]: raise KeyError( @@ -591,16 +637,15 @@ def exists(self, client=None, retry=DEFAULT_RETRY): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. + Args: + client (Optional[google.cloud.bigquery.client.Client]): + the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - :rtype: bool - :returns: Boolean indicating existence of the job. + Returns: + bool: Boolean indicating existence of the job. """ client = self._require_client(client) @@ -623,13 +668,12 @@ def reload(self, client=None, retry=DEFAULT_RETRY): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/get - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. + Args: + client (Optional[google.cloud.bigquery.client.Client]): + the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. """ client = self._require_client(client) @@ -648,13 +692,13 @@ def cancel(self, client=None): See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/cancel - :type client: :class:`~google.cloud.bigquery.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current dataset. 
+ Args: + client (Optional[google.cloud.bigquery.client.Client]): + the client to use. If not passed, falls back to the + ``client`` stored on the current dataset. - :rtype: bool - :returns: Boolean indicating that the cancel request was sent. + Returns: + bool: Boolean indicating that the cancel request was sent. """ client = self._require_client(client) @@ -697,11 +741,11 @@ def _set_future_result(self): def done(self, retry=DEFAULT_RETRY): """Refresh the job and checks if it is complete. - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. + Args: + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - :rtype: bool - :returns: True if the job is complete, False otherwise. + Returns: + bool: True if the job is complete, False otherwise. """ # Do not refresh is the state is already done, as the job will not # change once complete. @@ -712,21 +756,21 @@ def done(self, retry=DEFAULT_RETRY): def result(self, timeout=None, retry=DEFAULT_RETRY): """Start the job and wait for it to complete and get the result. - :type timeout: float - :param timeout: - How long (in seconds) to wait for job to complete before raising - a :class:`concurrent.futures.TimeoutError`. + Args: + timeout (float): + How long (in seconds) to wait for job to complete before raising + a :class:`concurrent.futures.TimeoutError`. - :type retry: :class:`google.api_core.retry.Retry` - :param retry: (Optional) How to retry the RPC. + retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. - :rtype: _AsyncJob - :returns: This instance. + Returns: + _AsyncJob: This instance. - :raises: - :class:`~google.cloud.exceptions.GoogleCloudError` if the job - failed or :class:`concurrent.futures.TimeoutError` if the job did - not complete in the given timeout. + Raises: + google.cloud.exceptions.GoogleCloudError: + if the job failed. + concurrent.futures.TimeoutError: + if the job did not complete in the given timeout. 
""" if self.state is None: self._begin(retry=retry) @@ -740,8 +784,8 @@ def cancelled(self): cancelled in the API. This method is here to satisfy the interface for :class:`google.api_core.future.Future`. - :rtype: bool - :returns: False + Returns: + bool: False """ return ( self.error_result is not None @@ -752,7 +796,7 @@ def cancelled(self): class _JobConfig(object): """Abstract base class for job configuration objects. - Arguments: + Args: job_type (str): The key to use for the job configuration. """ @@ -795,10 +839,10 @@ def _get_sub_prop(self, key, default=None): _helpers._get_sub_prop( self._properties, ['query', 'destinationTable']) - Arguments: + Args: key (str): - Key for the value to get in the - ``self._properties[self._job_type]`` dictionary. + Key for the value to get in the + ``self._properties[self._job_type]`` dictionary. default (object): (Optional) Default value to return if the key is not found. Defaults to :data:`None`. @@ -824,10 +868,10 @@ def _set_sub_prop(self, key, value): _helper._set_sub_prop( self._properties, ['query', 'useLegacySql'], False) - Arguments: + Args: key (str): - Key to set in the ``self._properties[self._job_type]`` - dictionary. + Key to set in the ``self._properties[self._job_type]`` + dictionary. value (object): Value to set. """ _helpers._set_sub_prop(self._properties, [self._job_type, key], value) @@ -846,18 +890,18 @@ def _del_sub_prop(self, key): _helper._del_sub_prop( self._properties, ['query', 'useLegacySql']) - Arguments: + Args: key (str): - Key to remove in the ``self._properties[self._job_type]`` - dictionary. + Key to remove in the ``self._properties[self._job_type]`` + dictionary. """ _helpers._del_sub_prop(self._properties, [self._job_type, key]) def to_api_repr(self): """Build an API representation of the job config. - :rtype: dict - :returns: A dictionary in the format used by the BigQuery API. + Returns: + Dict: A dictionary in the format used by the BigQuery API. 
""" return copy.deepcopy(self._properties) @@ -868,12 +912,12 @@ def _fill_from_default(self, default_job_config): config. The merge is done at the top-level as well as for keys one level below the job type. - Arguments: + Args: default_job_config (google.cloud.bigquery.job._JobConfig): The default job config that will be used to fill in self. Returns: - google.cloud.bigquery.job._JobConfig A new (merged) job config. + google.cloud.bigquery.job._JobConfig: A new (merged) job config. """ if self._job_type != default_job_config._job_type: raise TypeError( @@ -899,13 +943,13 @@ def _fill_from_default(self, default_job_config): def from_api_repr(cls, resource): """Factory: construct a job configuration given its API representation - :type resource: dict - :param resource: - An extract job configuration in the same representation as is - returned from the API. + Args: + resource (Dict): + An extract job configuration in the same representation as is + returned from the API. - :rtype: :class:`google.cloud.bigquery.job._JobConfig` - :returns: Configuration parsed from ``resource``. + Returns: + google.cloud.bigquery.job._JobConfig: Configuration parsed from ``resource``. """ config = cls() config._properties = copy.deepcopy(resource) @@ -928,7 +972,7 @@ def allow_jagged_rows(self): """bool: Allow missing trailing optional columns (CSV only). See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.allowJaggedRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.allow_jagged_rows """ return self._get_sub_prop("allowJaggedRows") @@ -941,7 +985,7 @@ def allow_quoted_newlines(self): """bool: Allow quoted data containing newline characters (CSV only). 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.allowQuotedNewlines + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.allow_quoted_newlines """ return self._get_sub_prop("allowQuotedNewlines") @@ -954,7 +998,7 @@ def autodetect(self): """bool: Automatically infer the schema from a sample of the data. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.autodetect + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.autodetect """ return self._get_sub_prop("autodetect") @@ -996,7 +1040,7 @@ def create_disposition(self): for creating tables. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.createDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.create_disposition """ return self._get_sub_prop("createDisposition") @@ -1006,14 +1050,14 @@ def create_disposition(self, value): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` if using default encryption. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationEncryptionConfiguration + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.destination_encryption_configuration """ prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: @@ -1034,7 +1078,7 @@ def destination_table_description(self): """Union[str, None] name given to destination table. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.description + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.description """ prop = self._get_sub_prop("destinationTableProperties") if prop is not None: @@ -1053,7 +1097,7 @@ def destination_table_friendly_name(self): """Union[str, None] name given to destination table. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.friendlyName + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.friendly_name """ prop = self._get_sub_prop("destinationTableProperties") if prop is not None: @@ -1073,7 +1117,7 @@ def encoding(self): data. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.encoding + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.encoding """ return self._get_sub_prop("encoding") @@ -1086,7 +1130,7 @@ def field_delimiter(self): """str: The separator for fields in a CSV file. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.fieldDelimiter + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.field_delimiter """ return self._get_sub_prop("fieldDelimiter") @@ -1099,7 +1143,7 @@ def ignore_unknown_values(self): """bool: Ignore extra values not represented in the table schema. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.ignoreUnknownValues + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.ignore_unknown_values """ return self._get_sub_prop("ignoreUnknownValues") @@ -1112,7 +1156,7 @@ def max_bad_records(self): """int: Number of invalid rows to ignore. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.maxBadRecords + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.max_bad_records """ return _helpers._int_or_none(self._get_sub_prop("maxBadRecords")) @@ -1125,7 +1169,7 @@ def null_marker(self): """str: Represents a null value (CSV only). See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.nullMarker + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.null_marker """ return self._get_sub_prop("nullMarker") @@ -1138,7 +1182,7 @@ def quote_character(self): """str: Character used to quote data sections (CSV only). See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.quote + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.quote """ return self._get_sub_prop("quote") @@ -1146,13 +1190,49 @@ def quote_character(self): def quote_character(self, value): self._set_sub_prop("quote", value) + @property + def range_partitioning(self): + """Optional[google.cloud.bigquery.table.RangePartitioning]: + Configures range-based partitioning for destination table. + + .. note:: + **Beta**. The integer range partitioning feature is in a + pre-release state and might change or have limited support. + + Only specify at most one of + :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or + :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. + + Raises: + ValueError: + If the value is not + :class:`~google.cloud.bigquery.table.RangePartitioning` or + :data:`None`. 
+ """ + resource = self._get_sub_prop("rangePartitioning") + if resource is not None: + return RangePartitioning(_properties=resource) + + @range_partitioning.setter + def range_partitioning(self, value): + resource = value + if isinstance(value, RangePartitioning): + resource = value._properties + elif value is not None: + raise ValueError( + "Expected value to be RangePartitioning or None, got {}.".format(value) + ) + self._set_sub_prop("rangePartitioning", resource) + @property def schema(self): - """List[google.cloud.bigquery.schema.SchemaField]: Schema of the - destination table. + """Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]: Schema of the destination table. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schema + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.schema """ schema = _helpers._get_sub_prop(self._properties, ["load", "schema", "fields"]) if schema is None: @@ -1165,8 +1245,8 @@ def schema(self, value): self._del_sub_prop("schema") return - if not all(hasattr(field, "to_api_repr") for field in value): - raise ValueError("Schema items must be fields") + value = _to_schema_fields(value) + _helpers._set_sub_prop( self._properties, ["load", "schema", "fields"], @@ -1190,7 +1270,7 @@ def skip_leading_rows(self): """int: Number of rows to skip when reading data (CSV only). See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.skipLeadingRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.skip_leading_rows """ return _helpers._int_or_none(self._get_sub_prop("skipLeadingRows")) @@ -1203,7 +1283,7 @@ def source_format(self): """google.cloud.bigquery.job.SourceFormat: File format of the data. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceFormat + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.source_format """ return self._get_sub_prop("sourceFormat") @@ -1215,6 +1295,10 @@ def source_format(self, value): def time_partitioning(self): """google.cloud.bigquery.table.TimePartitioning: Specifies time-based partitioning for the destination table. + + Only specify at most one of + :attr:`~google.cloud.bigquery.job.LoadJobConfig.time_partitioning` or + :attr:`~google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. """ prop = self._get_sub_prop("timePartitioning") if prop is not None: @@ -1248,7 +1332,7 @@ def write_disposition(self): the destination table already exists. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.writeDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.write_disposition """ return self._get_sub_prop("writeDisposition") @@ -1262,21 +1346,19 @@ class LoadJob(_AsyncJob): Can load from Google Cloud Storage URIs or from a file. - :type job_id: str - :param job_id: the job's ID + Args: + job_id (str): the job's ID - :type source_uris: sequence of string or ``NoneType`` - :param source_uris: - URIs of one or more data files to be loaded. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.sourceUris - for supported URI formats. Pass None for jobs that load from a file. + source_uris (Optional[Sequence[str]]): + URIs of one or more data files to be loaded. See + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.source_uris + for supported URI formats. Pass None for jobs that load from a file. - :type destination: :class:`google.cloud.bigquery.table.TableReference` - :param destination: reference to table into which data is to be loaded. 
+ destination (google.cloud.bigquery.table.TableReference): reference to table into which data is to be loaded. - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: A client which holds credentials and project configuration - for the dataset (which requires a project). + client (google.cloud.bigquery.client.Client): + A client which holds credentials and project configuration + for the dataset (which requires a project). """ _JOB_TYPE = "load" @@ -1296,7 +1378,7 @@ def destination(self): """google.cloud.bigquery.table.TableReference: table where loaded rows are written See: - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTable + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationLoad.FIELDS.destination_table """ return self._destination @@ -1400,7 +1482,7 @@ def schema(self): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) @@ -1413,22 +1495,29 @@ def destination_encryption_configuration(self): @property def destination_table_description(self): - """Union[str, None] name given to destination table. + """Optional[str] name given to destination table. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.description + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.description """ return self._configuration.destination_table_description @property def destination_table_friendly_name(self): - """Union[str, None] name given to destination table. + """Optional[str] name given to destination table. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.destinationTableProperties.friendlyName + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#DestinationTableProperties.FIELDS.friendly_name """ return self._configuration.destination_table_friendly_name + @property + def range_partitioning(self): + """See + :attr:`google.cloud.bigquery.job.LoadJobConfig.range_partitioning`. + """ + return self._configuration.range_partitioning + @property def time_partitioning(self): """See @@ -1461,9 +1550,11 @@ def schema_update_options(self): def input_file_bytes(self): """Count of bytes loaded from source files. - :rtype: int, or ``NoneType`` - :returns: the count (None until set from the server). - :raises: ValueError for invalid value types. + Returns: + Optional[int]: the count (None until set from the server). + + Raises: + ValueError: for invalid value types. """ return _helpers._int_or_none( _helpers._get_sub_prop( @@ -1475,8 +1566,8 @@ def input_file_bytes(self): def input_files(self): """Count of source files. - :rtype: int, or ``NoneType`` - :returns: the count (None until set from the server). + Returns: + Optional[int]: the count (None until set from the server). """ return _helpers._int_or_none( _helpers._get_sub_prop( @@ -1488,8 +1579,8 @@ def input_files(self): def output_bytes(self): """Count of bytes saved to destination table. - :rtype: int, or ``NoneType`` - :returns: the count (None until set from the server). + Returns: + Optional[int]: the count (None until set from the server). """ return _helpers._int_or_none( _helpers._get_sub_prop( @@ -1501,8 +1592,8 @@ def output_bytes(self): def output_rows(self): """Count of rows saved to destination table. - :rtype: int, or ``NoneType`` - :returns: the count (None until set from the server). + Returns: + Optional[int]: the count (None until set from the server). 
""" return _helpers._int_or_none( _helpers._get_sub_prop( @@ -1539,15 +1630,15 @@ def from_api_repr(cls, resource, client): This method assumes that the project found in the resource matches the client's project. - :type resource: dict - :param resource: dataset job representation returned from the API + Args: + resource (Dict): dataset job representation returned from the API - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: Client which holds credentials and project - configuration for the dataset. + client (google.cloud.bigquery.client.Client): + Client which holds credentials and project + configuration for the dataset. - :rtype: :class:`google.cloud.bigquery.job.LoadJob` - :returns: Job parsed from ``resource``. + Returns: + google.cloud.bigquery.job.LoadJob: Job parsed from ``resource``. """ config_resource = resource.get("configuration", {}) config = LoadJobConfig.from_api_repr(config_resource) @@ -1580,7 +1671,7 @@ def create_disposition(self): for creating tables. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.createDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.create_disposition """ return self._get_sub_prop("createDisposition") @@ -1594,7 +1685,7 @@ def write_disposition(self): the destination table already exists. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.writeDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.write_disposition """ return self._get_sub_prop("writeDisposition") @@ -1604,14 +1695,14 @@ def write_disposition(self, value): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. 
Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` if using default encryption. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.copy.destinationEncryptionConfiguration + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationTableCopy.FIELDS.destination_encryption_configuration """ prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: @@ -1629,22 +1720,19 @@ def destination_encryption_configuration(self, value): class CopyJob(_AsyncJob): """Asynchronous job: copy data into a table from other tables. - :type job_id: str - :param job_id: the job's ID, within the project belonging to ``client``. + Args: + job_id (str): the job's ID, within the project belonging to ``client``. - :type sources: list of :class:`google.cloud.bigquery.table.TableReference` - :param sources: Table from which data is to be loaded. + sources (List[google.cloud.bigquery.table.TableReference]): Table from which data is to be loaded. - :type destination: :class:`google.cloud.bigquery.table.TableReference` - :param destination: Table into which data is to be loaded. + destination (google.cloud.bigquery.table.TableReference): Table into which data is to be loaded. - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: A client which holds credentials and project configuration - for the dataset (which requires a project). + client (google.cloud.bigquery.client.Client): + A client which holds credentials and project configuration + for the dataset (which requires a project). - :type job_config: :class:`~google.cloud.bigquery.job.CopyJobConfig` - :param job_config: - (Optional) Extra configuration options for the copy job. + job_config (google.cloud.bigquery.job.CopyJobConfig): + (Optional) Extra configuration options for the copy job. 
""" _JOB_TYPE = "copy" @@ -1675,7 +1763,7 @@ def write_disposition(self): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` @@ -1728,15 +1816,15 @@ def from_api_repr(cls, resource, client): This method assumes that the project found in the resource matches the client's project. - :type resource: dict - :param resource: dataset job representation returned from the API + Args: + resource (Dict): dataset job representation returned from the API - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: Client which holds credentials and project - configuration for the dataset. + client (google.cloud.bigquery.client.Client): + Client which holds credentials and project + configuration for the dataset. - :rtype: :class:`google.cloud.bigquery.job.CopyJob` - :returns: Job parsed from ``resource``. + Returns: + google.cloud.bigquery.job.CopyJob: Job parsed from ``resource``. """ job_id, config_resource = cls._get_resource_config(resource) config = CopyJobConfig.from_api_repr(config_resource) @@ -1775,7 +1863,7 @@ def compression(self): exported files. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.compression + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.compression """ return self._get_sub_prop("compression") @@ -1788,7 +1876,7 @@ def destination_format(self): """google.cloud.bigquery.job.DestinationFormat: Exported file format. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.destinationFormat + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.destination_format """ return self._get_sub_prop("destinationFormat") @@ -1801,7 +1889,7 @@ def field_delimiter(self): """str: Delimiter to use between fields in the exported data. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.fieldDelimiter + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.field_delimiter """ return self._get_sub_prop("fieldDelimiter") @@ -1814,7 +1902,7 @@ def print_header(self): """bool: Print a header row in the exported data. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.extract.printHeader + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationExtract.FIELDS.print_header """ return self._get_sub_prop("printHeader") @@ -1826,24 +1914,21 @@ def print_header(self, value): class ExtractJob(_AsyncJob): """Asynchronous job: extract data from a table into Cloud Storage. - :type job_id: str - :param job_id: the job's ID + Args: + job_id (str): the job's ID. - :type source: :class:`google.cloud.bigquery.table.TableReference` - :param source: Table into which data is to be loaded. + source (google.cloud.bigquery.table.TableReference): + Table from which data is to be extracted. - :type destination_uris: list of string - :param destination_uris: - URIs describing where the extracted data will be written in Cloud - Storage, using the format ``gs://<bucket_name>/<object_name_or_glob>``. + destination_uris (List[str]): + URIs describing where the extracted data will be written in Cloud + Storage, using the format ``gs://<bucket_name>/<object_name_or_glob>``. - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: - A client which holds credentials and project configuration. 
+ client (google.cloud.bigquery.client.Client): + A client which holds credentials and project configuration. - :type job_config: :class:`~google.cloud.bigquery.job.ExtractJobConfig` - :param job_config: - (Optional) Extra configuration options for the extract job. + job_config (google.cloud.bigquery.job.ExtractJobConfig): + (Optional) Extra configuration options for the extract job. """ _JOB_TYPE = "extract" @@ -1891,14 +1976,15 @@ def destination_uri_file_counts(self): """Return file counts from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.extract.destinationUriFileCounts + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics4.FIELDS.destination_uri_file_counts Returns: - a list of integer counts, each representing the number of files - per destination URI or URI pattern specified in the extract - configuration. These values will be in the same order as the URIs - specified in the 'destinationUris' field. Returns None if job is - not yet complete. + List[int]: + A list of integer counts, each representing the number of files + per destination URI or URI pattern specified in the extract + configuration. These values will be in the same order as the URIs + specified in the 'destinationUris' field. Returns None if job is + not yet complete. """ counts = self._job_statistics().get("destinationUriFileCounts") if counts is not None: @@ -1938,15 +2024,15 @@ def from_api_repr(cls, resource, client): This method assumes that the project found in the resource matches the client's project. - :type resource: dict - :param resource: dataset job representation returned from the API + Args: + resource (Dict): dataset job representation returned from the API - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: Client which holds credentials and project - configuration for the dataset. 
+ client (google.cloud.bigquery.client.Client): + Client which holds credentials and project + configuration for the dataset. - :rtype: :class:`google.cloud.bigquery.job.ExtractJob` - :returns: Job parsed from ``resource``. + Returns: + google.cloud.bigquery.job.ExtractJob: Job parsed from ``resource``. """ job_id, config_resource = cls._get_resource_config(resource) config = ExtractJobConfig.from_api_repr(config_resource) @@ -2007,14 +2093,14 @@ def __init__(self, **kwargs): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` if using default encryption. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationEncryptionConfiguration + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.destination_encryption_configuration """ prop = self._get_sub_prop("destinationEncryptionConfiguration") if prop is not None: @@ -2033,7 +2119,7 @@ def allow_large_results(self): """bool: Allow large query results tables (legacy SQL, only) See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.allowLargeResults + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.allow_large_results """ return self._get_sub_prop("allowLargeResults") @@ -2047,7 +2133,7 @@ def create_disposition(self): for creating tables. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.createDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.create_disposition """ return self._get_sub_prop("createDisposition") @@ -2070,7 +2156,7 @@ def default_dataset(self): separated by ``.``. 
For example: ``your-project.your_dataset``. See - https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.query.defaultDataset + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.default_dataset """ prop = self._get_sub_prop("defaultDataset") if prop is not None: @@ -2107,7 +2193,7 @@ def destination(self): ``your-project.your_dataset.your_table``. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.destinationTable + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.destination_table """ prop = self._get_sub_prop("destinationTable") if prop is not None: @@ -2130,7 +2216,7 @@ def dry_run(self): costs. See - https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.dryRun + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfiguration.FIELDS.dry_run """ return self._properties.get("dryRun") @@ -2143,7 +2229,7 @@ def flatten_results(self): """bool: Flatten nested/repeated fields in results. (Legacy SQL only) See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.flattenResults + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.flatten_results """ return self._get_sub_prop("flattenResults") @@ -2157,7 +2243,7 @@ def maximum_billing_tier(self): queries. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.maximumBillingTier + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.maximum_billing_tier """ return self._get_sub_prop("maximumBillingTier") @@ -2170,7 +2256,7 @@ def maximum_bytes_billed(self): """int: Maximum bytes to be billed for this job or :data:`None` if not set. 
See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.maximumBytesBilled + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.maximum_bytes_billed """ return _helpers._int_or_none(self._get_sub_prop("maximumBytesBilled")) @@ -2183,7 +2269,7 @@ def priority(self): """google.cloud.bigquery.job.QueryPriority: Priority of the query. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.priority + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.priority """ return self._get_sub_prop("priority") @@ -2199,7 +2285,7 @@ def query_parameters(self): for parameterized query (empty by default) See: - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.queryParameters + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.query_parameters """ prop = self._get_sub_prop("queryParameters", default=[]) return _from_api_repr_query_parameters(prop) @@ -2208,13 +2294,47 @@ def query_parameters(self): def query_parameters(self, values): self._set_sub_prop("queryParameters", _to_api_repr_query_parameters(values)) + @property + def range_partitioning(self): + """Optional[google.cloud.bigquery.table.RangePartitioning]: + Configures range-based partitioning for destination table. + + .. note:: + **Beta**. The integer range partitioning feature is in a + pre-release state and might change or have limited support. + + Only specify at most one of + :attr:`~google.cloud.bigquery.job.QueryJobConfig.time_partitioning` or + :attr:`~google.cloud.bigquery.job.QueryJobConfig.range_partitioning`. + + Raises: + ValueError: + If the value is not + :class:`~google.cloud.bigquery.table.RangePartitioning` or + :data:`None`. 
+ """ + resource = self._get_sub_prop("rangePartitioning") + if resource is not None: + return RangePartitioning(_properties=resource) + + @range_partitioning.setter + def range_partitioning(self, value): + resource = value + if isinstance(value, RangePartitioning): + resource = value._properties + elif value is not None: + raise ValueError( + "Expected value to be RangePartitioning or None, got {}.".format(value) + ) + self._set_sub_prop("rangePartitioning", resource) + @property def udf_resources(self): """List[google.cloud.bigquery.query.UDFResource]: user defined function resources (empty by default) See: - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.userDefinedFunctionResources + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.user_defined_function_resources """ prop = self._get_sub_prop("userDefinedFunctionResources", default=[]) return _from_api_repr_udf_resources(prop) @@ -2230,7 +2350,7 @@ def use_legacy_sql(self): """bool: Use legacy SQL syntax. See - https://g.co/cloud/bigquery/docs/reference/v2/jobs#configuration.query.useLegacySql + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.use_legacy_sql """ return self._get_sub_prop("useLegacySql") @@ -2243,7 +2363,7 @@ def use_query_cache(self): """bool: Look for the query result in the cache. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.useQueryCache + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.use_query_cache """ return self._get_sub_prop("useQueryCache") @@ -2257,7 +2377,7 @@ def write_disposition(self): the destination table already exists. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.writeDisposition + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.write_disposition """ return self._get_sub_prop("writeDisposition") @@ -2271,7 +2391,7 @@ def table_definitions(self): Definitions for external tables or :data:`None` if not set. See - https://g.co/cloud/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.external_table_definitions """ prop = self._get_sub_prop("tableDefinitions") if prop is not None: @@ -2284,8 +2404,18 @@ def table_definitions(self, values): @property def time_partitioning(self): - """google.cloud.bigquery.table.TimePartitioning: Specifies time-based - partitioning for the destination table. + """Optional[google.cloud.bigquery.table.TimePartitioning]: Specifies + time-based partitioning for the destination table. + + Only specify at most one of + :attr:`~google.cloud.bigquery.job.QueryJobConfig.time_partitioning` or + :attr:`~google.cloud.bigquery.job.QueryJobConfig.range_partitioning`. + + Raises: + ValueError: + If the value is not + :class:`~google.cloud.bigquery.table.TimePartitioning` or + :data:`None`. """ prop = self._get_sub_prop("timePartitioning") if prop is not None: @@ -2301,7 +2431,7 @@ def time_partitioning(self, value): @property def clustering_fields(self): - """Union[List[str], None]: Fields defining clustering for the table + """Optional[List[str]]: Fields defining clustering for the table (Defaults to :data:`None`). @@ -2318,7 +2448,7 @@ def clustering_fields(self): @clustering_fields.setter def clustering_fields(self, value): - """Union[List[str], None]: Fields defining clustering for the table + """Optional[List[str]]: Fields defining clustering for the table (Defaults to :data:`None`). 
""" @@ -2343,7 +2473,7 @@ def to_api_repr(self): """Build an API representation of the query job config. Returns: - dict: A dictionary in the format used by the BigQuery API. + Dict: A dictionary in the format used by the BigQuery API. """ resource = copy.deepcopy(self._properties) @@ -2362,19 +2492,17 @@ def to_api_repr(self): class QueryJob(_AsyncJob): """Asynchronous job: query tables. - :type job_id: str - :param job_id: the job's ID, within the project belonging to ``client``. + Args: + job_id (str): the job's ID, within the project belonging to ``client``. - :type query: str - :param query: SQL query string + query (str): SQL query string. - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: A client which holds credentials and project configuration - for the dataset (which requires a project). + client (google.cloud.bigquery.client.Client): + A client which holds credentials and project configuration + for the dataset (which requires a project). - :type job_config: :class:`~google.cloud.bigquery.job.QueryJobConfig` - :param job_config: - (Optional) Extra configuration options for the query job. + job_config (google.cloud.bigquery.job.QueryJobConfig): + (Optional) Extra configuration options for the query job. """ _JOB_TYPE = "query" @@ -2426,7 +2554,7 @@ def destination(self): @property def destination_encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the destination table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` @@ -2463,7 +2591,7 @@ def query(self): """str: The query text used in this query job. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.query + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobConfigurationQuery.FIELDS.query """ return _helpers._get_sub_prop( self._properties, ["configuration", "query", "query"] @@ -2518,6 +2646,13 @@ def maximum_bytes_billed(self): """ return self._configuration.maximum_bytes_billed + @property + def range_partitioning(self): + """See + :attr:`google.cloud.bigquery.job.QueryJobConfig.range_partitioning`. + """ + return self._configuration.range_partitioning + @property def table_definitions(self): """See @@ -2566,15 +2701,15 @@ def _copy_configuration_properties(self, configuration): def from_api_repr(cls, resource, client): """Factory: construct a job given its API representation - :type resource: dict - :param resource: dataset job representation returned from the API + Args: + resource (Dict): dataset job representation returned from the API - :type client: :class:`google.cloud.bigquery.client.Client` - :param client: Client which holds credentials and project - configuration for the dataset. + client (google.cloud.bigquery.client.Client): + Client which holds credentials and project + configuration for the dataset. - :rtype: :class:`google.cloud.bigquery.job.QueryJob` - :returns: Job parsed from ``resource``. + Returns: + google.cloud.bigquery.job.QueryJob: Job parsed from ``resource``. """ job_id, config = cls._get_resource_config(resource) query = _helpers._get_sub_prop(config, ["query", "query"]) @@ -2587,11 +2722,12 @@ def query_plan(self): """Return query plan from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.queryPlan + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.query_plan - :rtype: list of :class:`QueryPlanEntry` - :returns: mappings describing the query plan, or an empty list - if the query has not yet completed. 
+ Returns: + List[QueryPlanEntry]: + mappings describing the query plan, or an empty list + if the query has not yet completed. """ plan_entries = self._job_statistics().get("queryPlan", ()) return [QueryPlanEntry.from_api_repr(entry) for entry in plan_entries] @@ -2609,11 +2745,12 @@ def total_bytes_processed(self): """Return total bytes processed from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesProcessed + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.total_bytes_processed - :rtype: int or None - :returns: total bytes processed by the job, or None if job is not - yet complete. + Returns: + Optional[int]: + Total bytes processed by the job, or None if job is not + yet complete. """ result = self._job_statistics().get("totalBytesProcessed") if result is not None: @@ -2625,11 +2762,12 @@ def total_bytes_billed(self): """Return total bytes billed from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.totalBytesBilled + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.total_bytes_billed - :rtype: int or None - :returns: total bytes processed by the job, or None if job is not - yet complete. + Returns: + Optional[int]: + Total bytes billed for the job, or None if job is not + yet complete. """ result = self._job_statistics().get("totalBytesBilled") if result is not None: @@ -2641,11 +2779,12 @@ def billing_tier(self): """Return billing tier from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.billingTier + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.billing_tier - :rtype: int or None - :returns: billing tier used by the job, or None if job is not - yet complete. 
+ Returns: + Optional[int]: + Billing tier used by the job, or None if job is not + yet complete. """ return self._job_statistics().get("billingTier") @@ -2654,11 +2793,12 @@ def cache_hit(self): """Return whether or not query results were served from cache. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.cacheHit + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.cache_hit - :rtype: bool or None - :returns: whether the query results were returned from cache, or None - if job is not yet complete. + Returns: + Optional[bool]: + whether the query results were returned from cache, or None + if job is not yet complete. """ return self._job_statistics().get("cacheHit") @@ -2667,7 +2807,7 @@ def ddl_operation_performed(self): """Optional[str]: Return the DDL operation performed. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlOperationPerformed + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_operation_performed """ return self._job_statistics().get("ddlOperationPerformed") @@ -2678,7 +2818,7 @@ def ddl_target_routine(self): for CREATE/DROP FUNCTION/PROCEDURE queries. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobstatistics + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_target_routine """ prop = self._job_statistics().get("ddlTargetRoutine") if prop is not None: @@ -2691,7 +2831,7 @@ def ddl_target_table(self): for CREATE/DROP TABLE/VIEW queries. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlTargetTable + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.ddl_target_table """ prop = self._job_statistics().get("ddlTargetTable") if prop is not None: @@ -2703,11 +2843,12 @@ def num_dml_affected_rows(self): """Return the number of DML rows affected by the job. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.numDmlAffectedRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.num_dml_affected_rows - :rtype: int or None - :returns: number of DML rows affected by the job, or None if job is not - yet complete. + Returns: + Optional[int]: + number of DML rows affected by the job, or None if job is not + yet complete. """ result = self._job_statistics().get("numDmlAffectedRows") if result is not None: @@ -2724,11 +2865,12 @@ def statement_type(self): """Return statement type from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.statementType + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.statement_type - :rtype: str or None - :returns: type of statement used by the job, or None if job is not - yet complete. + Returns: + Optional[str]: + type of statement used by the job, or None if job is not + yet complete. """ return self._job_statistics().get("statementType") @@ -2737,11 +2879,12 @@ def referenced_tables(self): """Return referenced tables from job statistics, if present. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.referencedTables + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.referenced_tables - :rtype: list of dict - :returns: mappings describing the query plan, or an empty list - if the query has not yet completed. + Returns: + List[Dict]: + mappings describing the query plan, or an empty list + if the query has not yet completed. """ tables = [] datasets_by_project_name = {} @@ -2766,15 +2909,16 @@ def undeclared_query_parameters(self): """Return undeclared query parameters from job statistics, if present. 
See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.undeclaredQueryParameters + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.undeclared_query_parameters - :rtype: - list of - :class:`~google.cloud.bigquery.ArrayQueryParameter`, - :class:`~google.cloud.bigquery.ScalarQueryParameter`, or - :class:`~google.cloud.bigquery.StructQueryParameter` - :returns: undeclared parameters, or an empty list if the query has - not yet completed. + Returns: + List[Union[ \ + google.cloud.bigquery.query.ArrayQueryParameter, \ + google.cloud.bigquery.query.ScalarQueryParameter, \ + google.cloud.bigquery.query.StructQueryParameter \ + ]]: + Undeclared parameters, or an empty list if the query has + not yet completed. """ parameters = [] undeclared = self._job_statistics().get("undeclaredQueryParameters", ()) @@ -2798,11 +2942,12 @@ def estimated_bytes_processed(self): """Return the estimated number of bytes processed by the query. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.estimatedBytesProcessed + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#JobStatistics2.FIELDS.estimated_bytes_processed - :rtype: int or None - :returns: number of DML rows affected by the job, or None if job is not - yet complete. + Returns: + Optional[int]: + Estimated number of bytes processed by the query, or None + if job is not yet complete. """ result = self._job_statistics().get("estimatedBytesProcessed") if result is not None: @@ -2812,8 +2957,8 @@ def estimated_bytes_processed(self): def done(self, retry=DEFAULT_RETRY): """Refresh the job and checks if it is complete. - :rtype: bool - :returns: True if the job is complete, False otherwise. + Returns: + bool: True if the job is complete, False otherwise. 
""" # Since the API to getQueryResults can hang up to the timeout value # (default of 10 seconds), set the timeout parameter to ensure that @@ -2859,8 +3004,8 @@ def _format_for_exception(query, job_id): query (str): The SQL query to format. job_id (str): The ID of the job that ran the query. - Returns: (str) - A formatted query text. + Returns: + str: A formatted query text. """ template = "\n\n(job ID: {job_id})\n\n{header}\n\n{ruler}\n{body}\n{ruler}" @@ -2895,14 +3040,14 @@ def _begin(self, client=None, retry=DEFAULT_RETRY): How to retry the RPC. Raises: - ValueError: - If the job has already begun. + ValueError: If the job has already begun. """ try: super(QueryJob, self)._begin(client=client, retry=retry) except exceptions.GoogleCloudError as exc: exc.message += self._format_for_exception(self.query, self.job_id) + exc.query_job = self raise def result( @@ -2945,6 +3090,7 @@ def result( ) except exceptions.GoogleCloudError as exc: exc.message += self._format_for_exception(self.query, self.job_id) + exc.query_job = self raise # If the query job is complete but there are no query results, this was @@ -2989,9 +3135,7 @@ def to_arrow(self, progress_bar_type=None, bqstorage_client=None): ``'tqdm_gui'`` Use the :func:`tqdm.tqdm_gui` function to display a progress bar as a graphical dialog box. - bqstorage_client ( \ - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ - ): + bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): **Beta Feature** Optional. A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. 
@@ -3024,9 +3168,7 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non """Return a pandas DataFrame from a QueryJob Args: - bqstorage_client ( \ - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ - ): + bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): **Alpha Feature** Optional. A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. @@ -3040,9 +3182,7 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non **Caution**: There is a known issue reading small anonymous query result tables with the BQ Storage API. Write your query results to a destination table to work around this issue. - dtypes ( \ - Map[str, Union[str, pandas.Series.dtype]] \ - ): + dtypes (Map[str, Union[str, pandas.Series.dtype]]): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior @@ -3079,11 +3219,10 @@ def __iter__(self): class QueryPlanEntryStep(object): """Map a single step in a query plan entry. - :type kind: str - :param kind: step type + Args: + kind (str): step type. - :type substeps: - :param substeps: names of substeps + substeps (List): names of substeps. """ def __init__(self, kind, substeps): @@ -3094,11 +3233,11 @@ def __init__(self, kind, substeps): def from_api_repr(cls, resource): """Factory: construct instance from the JSON repr. - :type resource: dict - :param resource: JSON representation of the entry + Args: + resource (Dict): JSON representation of the entry. - :rtype: :class:`QueryPlanEntryStep` - :return: new instance built from the resource + Returns: + QueryPlanEntryStep: new instance built from the resource. 
""" return cls(kind=resource.get("kind"), substeps=resource.get("substeps", ())) @@ -3112,9 +3251,8 @@ class QueryPlanEntry(object): """QueryPlanEntry represents a single stage of a query execution plan. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#ExplainQueryStage for the underlying API representation within query statistics. - """ def __init__(self): @@ -3126,11 +3264,11 @@ def from_api_repr(cls, resource): Args: resource(Dict[str: object]): - ExplainQueryStage representation returned from API + ExplainQueryStage representation returned from API. Returns: google.cloud.bigquery.QueryPlanEntry: - Query plan entry parsed from ``resource`` + Query plan entry parsed from ``resource``. """ entry = cls() entry._properties = resource @@ -3138,17 +3276,17 @@ def from_api_repr(cls, resource): @property def name(self): - """Union[str, None]: Human-readable name of the stage.""" + """Optional[str]: Human-readable name of the stage.""" return self._properties.get("name") @property def entry_id(self): - """Union[str, None]: Unique ID for the stage within the plan.""" + """Optional[str]: Unique ID for the stage within the plan.""" return self._properties.get("id") @property def start(self): - """Union[Datetime, None]: Datetime when the stage started.""" + """Optional[Datetime]: Datetime when the stage started.""" if self._properties.get("startMs") is None: return None return _helpers._datetime_from_microseconds( @@ -3157,7 +3295,7 @@ def start(self): @property def end(self): - """Union[Datetime, None]: Datetime when the stage ended.""" + """Optional[Datetime]: Datetime when the stage ended.""" if self._properties.get("endMs") is None: return None return _helpers._datetime_from_microseconds( @@ -3176,33 +3314,33 @@ def input_stages(self): @property def parallel_inputs(self): - """Union[int, None]: Number of parallel input segments within + """Optional[int]: Number of parallel input segments 
within the stage. """ return _helpers._int_or_none(self._properties.get("parallelInputs")) @property def completed_parallel_inputs(self): - """Union[int, None]: Number of parallel input segments completed.""" + """Optional[int]: Number of parallel input segments completed.""" return _helpers._int_or_none(self._properties.get("completedParallelInputs")) @property def wait_ms_avg(self): - """Union[int, None]: Milliseconds the average worker spent waiting to + """Optional[int]: Milliseconds the average worker spent waiting to be scheduled. """ return _helpers._int_or_none(self._properties.get("waitMsAvg")) @property def wait_ms_max(self): - """Union[int, None]: Milliseconds the slowest worker spent waiting to + """Optional[int]: Milliseconds the slowest worker spent waiting to be scheduled. """ return _helpers._int_or_none(self._properties.get("waitMsMax")) @property def wait_ratio_avg(self): - """Union[float, None]: Ratio of time the average worker spent waiting + """Optional[float]: Ratio of time the average worker spent waiting to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3210,7 +3348,7 @@ def wait_ratio_avg(self): @property def wait_ratio_max(self): - """Union[float, None]: Ratio of time the slowest worker spent waiting + """Optional[float]: Ratio of time the slowest worker spent waiting to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3218,21 +3356,21 @@ def wait_ratio_max(self): @property def read_ms_avg(self): - """Union[int, None]: Milliseconds the average worker spent reading + """Optional[int]: Milliseconds the average worker spent reading input. """ return _helpers._int_or_none(self._properties.get("readMsAvg")) @property def read_ms_max(self): - """Union[int, None]: Milliseconds the slowest worker spent reading + """Optional[int]: Milliseconds the slowest worker spent reading input. 
""" return _helpers._int_or_none(self._properties.get("readMsMax")) @property def read_ratio_avg(self): - """Union[float, None]: Ratio of time the average worker spent reading + """Optional[float]: Ratio of time the average worker spent reading input, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3240,7 +3378,7 @@ def read_ratio_avg(self): @property def read_ratio_max(self): - """Union[float, None]: Ratio of time the slowest worker spent reading + """Optional[float]: Ratio of time the slowest worker spent reading to be scheduled, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3248,21 +3386,21 @@ def read_ratio_max(self): @property def compute_ms_avg(self): - """Union[int, None]: Milliseconds the average worker spent on CPU-bound + """Optional[int]: Milliseconds the average worker spent on CPU-bound processing. """ return _helpers._int_or_none(self._properties.get("computeMsAvg")) @property def compute_ms_max(self): - """Union[int, None]: Milliseconds the slowest worker spent on CPU-bound + """Optional[int]: Milliseconds the slowest worker spent on CPU-bound processing. """ return _helpers._int_or_none(self._properties.get("computeMsMax")) @property def compute_ratio_avg(self): - """Union[float, None]: Ratio of time the average worker spent on + """Optional[float]: Ratio of time the average worker spent on CPU-bound processing, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3270,7 +3408,7 @@ def compute_ratio_avg(self): @property def compute_ratio_max(self): - """Union[float, None]: Ratio of time the slowest worker spent on + """Optional[float]: Ratio of time the slowest worker spent on CPU-bound processing, relative to the longest time spent by any worker in any stage of the overall plan. 
""" @@ -3278,21 +3416,21 @@ def compute_ratio_max(self): @property def write_ms_avg(self): - """Union[int, None]: Milliseconds the average worker spent writing + """Optional[int]: Milliseconds the average worker spent writing output data. """ return _helpers._int_or_none(self._properties.get("writeMsAvg")) @property def write_ms_max(self): - """Union[int, None]: Milliseconds the slowest worker spent writing + """Optional[int]: Milliseconds the slowest worker spent writing output data. """ return _helpers._int_or_none(self._properties.get("writeMsMax")) @property def write_ratio_avg(self): - """Union[float, None]: Ratio of time the average worker spent writing + """Optional[float]: Ratio of time the average worker spent writing output data, relative to the longest time spent by any worker in any stage of the overall plan. """ @@ -3300,7 +3438,7 @@ def write_ratio_avg(self): @property def write_ratio_max(self): - """Union[float, None]: Ratio of time the slowest worker spent writing + """Optional[float]: Ratio of time the slowest worker spent writing output data, relative to the longest time spent by any worker in any stage of the overall plan. 
""" @@ -3308,29 +3446,29 @@ def write_ratio_max(self): @property def records_read(self): - """Union[int, None]: Number of records read by this stage.""" + """Optional[int]: Number of records read by this stage.""" return _helpers._int_or_none(self._properties.get("recordsRead")) @property def records_written(self): - """Union[int, None]: Number of records written by this stage.""" + """Optional[int]: Number of records written by this stage.""" return _helpers._int_or_none(self._properties.get("recordsWritten")) @property def status(self): - """Union[str, None]: status of this stage.""" + """Optional[str]: status of this stage.""" return self._properties.get("status") @property def shuffle_output_bytes(self): - """Union[int, None]: Number of bytes written by this stage to + """Optional[int]: Number of bytes written by this stage to intermediate shuffle. """ return _helpers._int_or_none(self._properties.get("shuffleOutputBytes")) @property def shuffle_output_bytes_spilled(self): - """Union[int, None]: Number of bytes written by this stage to + """Optional[int]: Number of bytes written by this stage to intermediate shuffle and spilled to disk. """ return _helpers._int_or_none(self._properties.get("shuffleOutputBytesSpilled")) @@ -3351,9 +3489,8 @@ class TimelineEntry(object): point in time. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs + https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#querytimelinesample for the underlying API representation within query statistics. - """ def __init__(self): @@ -3365,11 +3502,11 @@ def from_api_repr(cls, resource): Args: resource(Dict[str: object]): - QueryTimelineSample representation returned from API + QueryTimelineSample representation returned from API. Returns: google.cloud.bigquery.TimelineEntry: - Timeline sample parsed from ``resource`` + Timeline sample parsed from ``resource``. 
""" entry = cls() entry._properties = resource @@ -3377,31 +3514,31 @@ def from_api_repr(cls, resource): @property def elapsed_ms(self): - """Union[int, None]: Milliseconds elapsed since start of query + """Optional[int]: Milliseconds elapsed since start of query execution.""" return _helpers._int_or_none(self._properties.get("elapsedMs")) @property def active_units(self): - """Union[int, None]: Current number of input units being processed + """Optional[int]: Current number of input units being processed by workers, reported as largest value since the last sample.""" return _helpers._int_or_none(self._properties.get("activeUnits")) @property def pending_units(self): - """Union[int, None]: Current number of input units remaining for + """Optional[int]: Current number of input units remaining for query stages active at this sample time.""" return _helpers._int_or_none(self._properties.get("pendingUnits")) @property def completed_units(self): - """Union[int, None]: Current number of input units completed by + """Optional[int]: Current number of input units completed by this query.""" return _helpers._int_or_none(self._properties.get("completedUnits")) @property def slot_millis(self): - """Union[int, None]: Cumulative slot-milliseconds consumed by + """Optional[int]: Cumulative slot-milliseconds consumed by this query.""" return _helpers._int_or_none(self._properties.get("totalSlotMs")) @@ -3414,7 +3551,7 @@ def from_api_repr(cls, resource, client): """Construct an UnknownJob from the JSON representation. Args: - resource (dict): JSON representation of a job. + resource (Dict): JSON representation of a job. client (google.cloud.bigquery.client.Client): Client connected to BigQuery API. @@ -3429,3 +3566,81 @@ def from_api_repr(cls, resource, client): resource["jobReference"] = job_ref_properties job._properties = resource return job + + +class ScriptStackFrame(object): + """Stack frame showing the line/column/procedure name where the current + evaluation happened. 
+ + Args: + resource (Map[str, Any]): JSON representation of object. + """ + + def __init__(self, resource): + self._properties = resource + + @property + def procedure_id(self): + """Optional[str]: Name of the active procedure. + + Omitted if in a top-level script. + """ + return self._properties.get("procedureId") + + @property + def text(self): + """str: Text of the current statement/expression.""" + return self._properties.get("text") + + @property + def start_line(self): + """int: One-based start line.""" + return _helpers._int_or_none(self._properties.get("startLine")) + + @property + def start_column(self): + """int: One-based start column.""" + return _helpers._int_or_none(self._properties.get("startColumn")) + + @property + def end_line(self): + """int: One-based end line.""" + return _helpers._int_or_none(self._properties.get("endLine")) + + @property + def end_column(self): + """int: One-based end column.""" + return _helpers._int_or_none(self._properties.get("endColumn")) + + +class ScriptStatistics(object): + """Statistics for a child job of a script. + + Args: + resource (Map[str, Any]): JSON representation of object. + """ + + def __init__(self, resource): + self._properties = resource + + @property + def stack_frames(self): + """List[ScriptStackFrame]: Stack trace where the current evaluation + happened. + + Shows line/column/procedure name of each frame on the stack at the + point where the current evaluation happened. + + The leaf frame is first, the primary script is last. + """ + return [ + ScriptStackFrame(frame) for frame in self._properties.get("stackFrames", []) + ] + + @property + def evaluation_kind(self): + """str: Indicates the type of child job. + + Possible values include ``STATEMENT`` and ``EXPRESSION``. 
+ """ + return self._properties.get("evaluationKind") diff --git a/bigquery/google/cloud/bigquery/magics.py b/bigquery/google/cloud/bigquery/magics.py index c238bb50317a..2a174cefeea3 100644 --- a/bigquery/google/cloud/bigquery/magics.py +++ b/bigquery/google/cloud/bigquery/magics.py @@ -28,7 +28,9 @@ * ```` (optional, line argument): variable to store the query results. The results are not displayed if - this parameter is used. + this parameter is used. If an error occurs during the query execution, + the corresponding ``QueryJob`` instance (if available) is stored in + the variable instead. * ``--project `` (optional, line argument): Project to use for running the query. Defaults to the context :attr:`~google.cloud.bigquery.magics.Context.project`. @@ -267,13 +269,29 @@ def default_query_job_config(self, value): context = Context() -def _print_error(error, destination_var=None): +def _handle_error(error, destination_var=None): + """Process a query execution error. + + Args: + error (Exception): + An exception that ocurred during the query exectution. + destination_var (Optional[str]): + The name of the IPython session variable to store the query job. 
+ """ if destination_var: - print( - "Could not save output to variable '{}'.".format(destination_var), - file=sys.stderr, - ) - print("\nERROR:\n", error, file=sys.stderr) + query_job = getattr(error, "query_job", None) + + if query_job is not None: + IPython.get_ipython().push({destination_var: query_job}) + else: + # this is the case when previewing table rows by providing just + # table ID to cell magic + print( + "Could not save output to variable '{}'.".format(destination_var), + file=sys.stderr, + ) + + print("\nERROR:\n", str(error), file=sys.stderr) def _run_query(client, query, job_config=None): @@ -452,7 +470,7 @@ def _cell_magic(line, query): try: rows = client.list_rows(query, max_results=max_results) except Exception as ex: - _print_error(str(ex), args.destination_var) + _handle_error(ex, args.destination_var) return result = rows.to_dataframe(bqstorage_client=bqstorage_client) @@ -476,7 +494,7 @@ def _cell_magic(line, query): try: query_job = _run_query(client, query, job_config=job_config) except Exception as ex: - _print_error(str(ex), args.destination_var) + _handle_error(ex, args.destination_var) return if not args.verbose: diff --git a/bigquery/google/cloud/bigquery/model.py b/bigquery/google/cloud/bigquery/model.py index 4049a9232467..7bad752ea658 100644 --- a/bigquery/google/cloud/bigquery/model.py +++ b/bigquery/google/cloud/bigquery/model.py @@ -25,6 +25,7 @@ from google.api_core import datetime_helpers from google.cloud.bigquery import _helpers from google.cloud.bigquery_v2 import types +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration class Model(object): @@ -34,10 +35,7 @@ class Model(object): https://cloud.google.com/bigquery/docs/reference/rest/v2/models Args: - model_ref (Union[ \ - :class:`~google.cloud.bigquery.model.ModelReference`, \ - str, \ - ]): + model_ref (Union[google.cloud.bigquery.model.ModelReference, str]): A pointer to a model. 
If ``model_ref`` is a string, it must included a project ID, dataset ID, and model ID, each separated by ``.``. @@ -51,6 +49,7 @@ class Model(object): # have an exhaustive list of all mutable properties. "labels": "labels", "description": "description", + "encryption_configuration": "encryptionConfiguration", } def __init__(self, model_ref): @@ -256,6 +255,30 @@ def labels(self, value): value = {} self._properties["labels"] = value + @property + def encryption_configuration(self): + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom + encryption configuration for the model. + + Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` + if using default encryption. + + See `protecting data with Cloud KMS keys + `_ + in the BigQuery documentation. + """ + prop = self._properties.get("encryptionConfiguration") + if prop: + prop = EncryptionConfiguration.from_api_repr(prop) + return prop + + @encryption_configuration.setter + def encryption_configuration(self, value): + api_repr = value + if value: + api_repr = value.to_api_repr() + self._properties["encryptionConfiguration"] = api_repr + @classmethod def from_api_repr(cls, resource): """Factory: construct a model resource given its API representation @@ -299,7 +322,7 @@ class ModelReference(object): """ModelReferences are pointers to models. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/models + https://cloud.google.com/bigquery/docs/reference/rest/v2/models#modelreference """ def __init__(self): diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py index 4039be33db8c..925f3e29d298 100644 --- a/bigquery/google/cloud/bigquery/query.py +++ b/bigquery/google/cloud/bigquery/query.py @@ -26,11 +26,10 @@ class UDFResource(object): """Describe a single user-defined function (UDF) resource. 
- :type udf_type: str - :param udf_type: the type of the resource ('inlineCode' or 'resourceUri') + Args: + udf_type (str): the type of the resource ('inlineCode' or 'resourceUri') - :type value: str - :param value: the inline code or resource URI. + value (str): the inline code or resource URI. See https://cloud.google.com/bigquery/user-defined-functions#api @@ -57,17 +56,19 @@ class _AbstractQueryParameter(object): def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. - :type resource: dict - :param resource: JSON mapping of parameter + Args: + resource (Dict): JSON mapping of parameter - :rtype: :class:`~google.cloud.bigquery.query.ScalarQueryParameter` + Returns: + google.cloud.bigquery.query.ScalarQueryParameter """ raise NotImplementedError def to_api_repr(self): """Construct JSON API representation for the parameter. - :rtype: dict + Returns: + Dict: JSON representation for the parameter. """ raise NotImplementedError @@ -75,18 +76,18 @@ def to_api_repr(self): class ScalarQueryParameter(_AbstractQueryParameter): """Named / positional query parameters for scalar values. - :type name: str or None - :param name: Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). + Args: + name (Optional[str]): + Parameter name, used via ``@foo`` syntax. If None, the + parameter can only be addressed via position (``?``). - :type type_: str - :param type_: name of parameter type. One of 'STRING', 'INT64', - 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or - 'DATE'. + type_ (str): + name of parameter type. One of 'STRING', 'INT64', + 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or + 'DATE'. - :type value: str, int, float, :class:`decimal.Decimal`, bool, - :class:`datetime.datetime`, or :class:`datetime.date`. - :param value: the scalar parameter value. + value (Union[str, int, float, decimal.Decimal, bool, + datetime.datetime, datetime.date]): the scalar parameter value. 
""" def __init__(self, name, type_, value): @@ -98,19 +99,18 @@ def __init__(self, name, type_, value): def positional(cls, type_, value): """Factory for positional paramater. - :type type_: str - :param type_: - name of parameter type. One of 'STRING', 'INT64', - 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or - 'DATE'. + Args: + type_ (str): + name of parameter type. One of 'STRING', 'INT64', + 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or + 'DATE'. - :type value: str, int, float, :class:`decimal.Decimal`, bool, - :class:`datetime.datetime`, or - :class:`datetime.date`. - :param value: the scalar parameter value. + value (Union[str, int, float, decimal.Decimal, bool, + datetime.datetime, + datetime.date]): the scalar parameter value. - :rtype: :class:`~google.cloud.bigquery.query.ScalarQueryParameter` - :returns: instance without name + Returns: + google.cloud.bigquery.query.ScalarQueryParameter: instance without name """ return cls(None, type_, value) @@ -118,11 +118,11 @@ def positional(cls, type_, value): def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. - :type resource: dict - :param resource: JSON mapping of parameter + Args: + resource (Dict): JSON mapping of parameter - :rtype: :class:`~google.cloud.bigquery.query.ScalarQueryParameter` - :returns: instance + Returns: + google.cloud.bigquery.query.ScalarQueryParameter: instance """ name = resource.get("name") type_ = resource["parameterType"]["type"] @@ -140,8 +140,8 @@ def from_api_repr(cls, resource): def to_api_repr(self): """Construct JSON API representation for the parameter. - :rtype: dict - :returns: JSON mapping + Returns: + Dict: JSON mapping """ value = self.value converter = _SCALAR_VALUE_TO_JSON_PARAM.get(self.type_) @@ -161,8 +161,7 @@ def _key(self): Used to compute this instance's hashcode and evaluate equality. Returns: - tuple: The contents of this - :class:`~google.cloud.bigquery.query.ScalarQueryParameter`. 
+ Tuple: The contents of this :class:`~google.cloud.bigquery.query.ScalarQueryParameter`. """ return (self.name, self.type_.upper(), self.value) @@ -181,17 +180,16 @@ def __repr__(self): class ArrayQueryParameter(_AbstractQueryParameter): """Named / positional query parameters for array values. - :type name: str or None - :param name: Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). + Args: + name (Optional[str]): + Parameter name, used via ``@foo`` syntax. If None, the + parameter can only be addressed via position (``?``). - :type array_type: str - :param array_type: - name of type of array elements. One of `'STRING'`, `'INT64'`, - `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. + array_type (str): + name of type of array elements. One of `'STRING'`, `'INT64'`, + `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. - :type values: list of appropriate scalar type. - :param values: the parameter array values. + values (List[appropriate scalar type]): the parameter array values. """ def __init__(self, name, array_type, values): @@ -203,16 +201,15 @@ def __init__(self, name, array_type, values): def positional(cls, array_type, values): """Factory for positional parameters. - :type array_type: str - :param array_type: - name of type of array elements. One of `'STRING'`, `'INT64'`, - `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. + Args: + array_type (str): + name of type of array elements. One of `'STRING'`, `'INT64'`, + `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. - :type values: list of appropriate scalar type - :param values: the parameter array values. + values (List[appropriate scalar type]): the parameter array values. 
- :rtype: :class:`~google.cloud.bigquery.query.ArrayQueryParameter` - :returns: instance without name + Returns: + google.cloud.bigquery.query.ArrayQueryParameter: instance without name """ return cls(None, array_type, values) @@ -249,11 +246,11 @@ def _from_api_repr_scalar(cls, resource): def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. - :type resource: dict - :param resource: JSON mapping of parameter + Args: + resource (Dict): JSON mapping of parameter - :rtype: :class:`~google.cloud.bigquery.query.ArrayQueryParameter` - :returns: instance + Returns: + google.cloud.bigquery.query.ArrayQueryParameter: instance """ array_type = resource["parameterType"]["arrayType"]["type"] if array_type == "STRUCT": @@ -263,8 +260,8 @@ def from_api_repr(cls, resource): def to_api_repr(self): """Construct JSON API representation for the parameter. - :rtype: dict - :returns: JSON mapping + Returns: + Dict: JSON mapping """ values = self.values if self.array_type == "RECORD" or self.array_type == "STRUCT": @@ -291,8 +288,7 @@ def _key(self): Used to compute this instance's hashcode and evaluate equality. Returns: - tuple: The contents of this - :class:`~google.cloud.bigquery.query.ArrayQueryParameter`. + Tuple: The contents of this :class:`~google.cloud.bigquery.query.ArrayQueryParameter`. """ return (self.name, self.array_type.upper(), self.values) @@ -311,15 +307,16 @@ def __repr__(self): class StructQueryParameter(_AbstractQueryParameter): """Named / positional query parameters for struct values. - :type name: str or None - :param name: Parameter name, used via ``@foo`` syntax. If None, the - parameter can only be addressed via position (``?``). + Args: + name (Optional[str]): + Parameter name, used via ``@foo`` syntax. If None, the + parameter can only be addressed via position (``?``). 
- :type sub_params: - tuple of :class:`~google.cloud.bigquery.query.ScalarQueryParameter`, - :class:`~google.cloud.bigquery.query.ArrayQueryParameter`, or - :class:`~google.cloud.bigquery.query.StructQueryParameter` - :param sub_params: the sub-parameters for the struct + sub_params (Union[Tuple[ + google.cloud.bigquery.query.ScalarQueryParameter, + google.cloud.bigquery.query.ArrayQueryParameter, + google.cloud.bigquery.query.StructQueryParameter + ]]): the sub-parameters for the struct """ def __init__(self, name, *sub_params): @@ -341,15 +338,15 @@ def __init__(self, name, *sub_params): def positional(cls, *sub_params): """Factory for positional parameters. - :type sub_params: - tuple of - :class:`~google.cloud.bigquery.query.ScalarQueryParameter`, - :class:`~google.cloud.bigquery.query.ArrayQueryParameter`, or - :class:`~google.cloud.bigquery.query.StructQueryParameter` - :param sub_params: the sub-parameters for the struct + Args: + sub_params (Union[Tuple[ + google.cloud.bigquery.query.ScalarQueryParameter, + google.cloud.bigquery.query.ArrayQueryParameter, + google.cloud.bigquery.query.StructQueryParameter + ]]): the sub-parameters for the struct - :rtype: :class:`~google.cloud.bigquery.query.StructQueryParameter` - :returns: instance without name + Returns: + google.cloud.bigquery.query.StructQueryParameter: instance without name """ return cls(None, *sub_params) @@ -357,11 +354,11 @@ def positional(cls, *sub_params): def from_api_repr(cls, resource): """Factory: construct parameter from JSON resource. 
- :type resource: dict - :param resource: JSON mapping of parameter + Args: + resource (Dict): JSON mapping of parameter - :rtype: :class:`~google.cloud.bigquery.query.StructQueryParameter` - :returns: instance + Returns: + google.cloud.bigquery.query.StructQueryParameter: instance """ name = resource.get("name") instance = cls(name) @@ -397,8 +394,8 @@ def from_api_repr(cls, resource): def to_api_repr(self): """Construct JSON API representation for the parameter. - :rtype: dict - :returns: JSON mapping + Returns: + Dict: JSON mapping """ s_types = {} values = {} @@ -432,8 +429,7 @@ def _key(self): Used to compute this instance's hashcode and evaluate equality. Returns: - tuple: The contents of this - :class:`~google.cloud.biquery.ArrayQueryParameter`. + Tuple: The contents of this :class:`~google.cloud.biquery.ArrayQueryParameter`. """ return (self.name, self.struct_types, self.struct_values) @@ -468,8 +464,8 @@ def from_api_repr(cls, api_response): def project(self): """Project bound to the query job. - :rtype: str - :returns: the project that the query job is associated with. + Returns: + str: The project that the query job is associated with. """ return self._properties.get("jobReference", {}).get("projectId") @@ -478,11 +474,12 @@ def cache_hit(self): """Query results served from cache. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#cacheHit + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.cache_hit - :rtype: bool or ``NoneType`` - :returns: True if the query results were served from cache (None - until set by the server). + Returns: + Optional[bool]: + True if the query results were served from cache (None + until set by the server). """ return self._properties.get("cacheHit") @@ -491,11 +488,12 @@ def complete(self): """Server completed query. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#jobComplete + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.job_complete - :rtype: bool or ``NoneType`` - :returns: True if the query completed on the server (None - until set by the server). + Returns: + Optional[bool]: + True if the query completed on the server (None + until set by the server). """ return self._properties.get("jobComplete") @@ -504,11 +502,12 @@ def errors(self): """Errors generated by the query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#errors + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.errors - :rtype: list of mapping, or ``NoneType`` - :returns: Mappings describing errors generated on the server (None - until set by the server). + Returns: + Optional[List[Mapping]]: + Mappings describing errors generated on the server (None + until set by the server). """ return self._properties.get("errors") @@ -517,10 +516,10 @@ def job_id(self): """Job ID of the query job these results are from. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#jobReference + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.job_reference - :rtype: string - :returns: Job ID of the query job. + Returns: + str: Job ID of the query job. """ return self._properties.get("jobReference", {}).get("jobId") @@ -529,10 +528,10 @@ def page_token(self): """Token for fetching next bach of results. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#pageToken + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.page_token - :rtype: str, or ``NoneType`` - :returns: Token generated on the server (None until set by the server). + Returns: + Optional[str]: Token generated on the server (None until set by the server). 
""" return self._properties.get("pageToken") @@ -541,10 +540,10 @@ def total_rows(self): """Total number of rows returned by the query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#totalRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.total_rows - :rtype: int, or ``NoneType`` - :returns: Count generated on the server (None until set by the server). + Returns: + Optional[int}: Count generated on the server (None until set by the server). """ total_rows = self._properties.get("totalRows") if total_rows is not None: @@ -555,10 +554,10 @@ def total_bytes_processed(self): """Total number of bytes processed by the query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#totalBytesProcessed + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.total_bytes_processed - :rtype: int, or ``NoneType`` - :returns: Count generated on the server (None until set by the server). + Returns: + Optional[int]: Count generated on the server (None until set by the server). """ total_bytes_processed = self._properties.get("totalBytesProcessed") if total_bytes_processed is not None: @@ -569,10 +568,10 @@ def num_dml_affected_rows(self): """Total number of rows affected by a DML query. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#numDmlAffectedRows + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.num_dml_affected_rows - :rtype: int, or ``NoneType`` - :returns: Count generated on the server (None until set by the server). + Returns: + Optional[int]: Count generated on the server (None until set by the server). """ num_dml_affected_rows = self._properties.get("numDmlAffectedRows") if num_dml_affected_rows is not None: @@ -583,10 +582,11 @@ def rows(self): """Query results. 
See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#rows + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.rows - :rtype: list of :class:`~google.cloud.bigquery.table.Row` - :returns: fields describing the schema (None until set by the server). + Returns: + Optional[List[google.cloud.bigquery.table.Row]]: + Fields describing the schema (None until set by the server). """ return _rows_from_json(self._properties.get("rows", ()), self.schema) @@ -595,18 +595,19 @@ def schema(self): """Schema for query results. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#schema + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query#body.QueryResponse.FIELDS.schema - :rtype: list of :class:`SchemaField`, or ``NoneType`` - :returns: fields describing the schema (None until set by the server). + Returns: + Optional[List[SchemaField]]: + Fields describing the schema (None until set by the server). """ return _parse_schema_resource(self._properties.get("schema", {})) def _set_properties(self, api_response): """Update properties from resource in body of ``api_response`` - :type api_response: dict - :param api_response: response returned from an API call + Args: + api_response (Dict): response returned from an API call """ job_id_present = ( "jobReference" in api_response diff --git a/bigquery/google/cloud/bigquery/routine.py b/bigquery/google/cloud/bigquery/routine.py index d5bb752dfddb..044368e75108 100644 --- a/bigquery/google/cloud/bigquery/routine.py +++ b/bigquery/google/cloud/bigquery/routine.py @@ -31,10 +31,7 @@ class Routine(object): https://cloud.google.com/bigquery/docs/reference/rest/v2/routines Args: - routine_ref (Union[ \ - str, \ - google.cloud.bigquery.routine.RoutineReference, \ - ]): + routine_ref (Union[str, google.cloud.bigquery.routine.RoutineReference]): A pointer to a routine. 
If ``routine_ref`` is a string, it must included a project ID, dataset ID, and routine ID, each separated by ``.``. @@ -186,7 +183,7 @@ def return_type(self): time. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#resource-routine + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Routine.FIELDS.return_type """ resource = self._properties.get(self._PROPERTY_TO_API_FIELD["return_type"]) if not resource: @@ -262,8 +259,7 @@ def to_api_repr(self): """Construct the API resource representation of this routine. Returns: - Dict[str, object]: - Routine represented as an API resource. + Dict[str, object]: Routine represented as an API resource. """ return self._properties @@ -281,7 +277,7 @@ class RoutineArgument(object): """Input/output argument of a function or a stored procedure. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#argument Args: ``**kwargs`` (Dict): @@ -321,7 +317,7 @@ def kind(self): ``ANY_TYPE``. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#ArgumentKind + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Argument.FIELDS.argument_kind """ return self._properties.get(self._PROPERTY_TO_API_FIELD["kind"]) @@ -344,7 +340,7 @@ def data_type(self): of a variable, e.g., a function argument. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/StandardSqlDataType + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#Argument.FIELDS.data_type """ resource = self._properties.get(self._PROPERTY_TO_API_FIELD["data_type"]) if not resource: @@ -366,8 +362,7 @@ def from_api_repr(cls, resource): """Factory: construct a routine argument given its API representation. Args: - resource (Dict[str, object]): - Resource, as returned from the API. + resource (Dict[str, object]): Resource, as returned from the API. 
Returns: google.cloud.bigquery.routine.RoutineArgument: @@ -381,8 +376,7 @@ def to_api_repr(self): """Construct the API resource representation of this routine argument. Returns: - Dict[str, object]: - Routine argument represented as an API resource. + Dict[str, object]: Routine argument represented as an API resource. """ return self._properties @@ -406,7 +400,7 @@ class RoutineReference(object): """A pointer to a routine. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/routines + https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#routinereference """ def __init__(self): @@ -485,8 +479,7 @@ def to_api_repr(self): """Construct the API resource representation of this routine reference. Returns: - Dict[str, object]: - Routine reference represented as an API resource. + Dict[str, object]: Routine reference represented as an API resource. """ return self._properties diff --git a/bigquery/google/cloud/bigquery/schema.py b/bigquery/google/cloud/bigquery/schema.py index e0673d85baf6..d766cb542608 100644 --- a/bigquery/google/cloud/bigquery/schema.py +++ b/bigquery/google/cloud/bigquery/schema.py @@ -14,6 +14,8 @@ """Schemas for BigQuery tables / queries.""" +import collections + from google.cloud.bigquery_v2 import types @@ -51,14 +53,14 @@ class SchemaField(object): name (str): the name of the field. field_type (str): the type of the field. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#schema.fields.type + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.type mode (str): the mode of the field. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#schema.fields.mode + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.mode - description (Optional[str]):description for the field. + description (Optional[str]): description for the field. 
- fields (Tuple[:class:`~google.cloud.bigquery.schema.SchemaField`]): + fields (Tuple[google.cloud.bigquery.schema.SchemaField]): subfields (requires ``field_type`` of 'RECORD'). """ @@ -79,8 +81,7 @@ def from_api_repr(cls, api_repr): :meth:`to_api_repr`. Returns: - google.cloud.biquery.schema.SchemaField: - The ``SchemaField`` object. + google.cloud.bigquery.schema.SchemaField: The ``SchemaField`` object. """ # Handle optional properties with default values mode = api_repr.get("mode", "NULLABLE") @@ -104,7 +105,7 @@ def field_type(self): """str: The type of the field. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#schema.fields.type + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.type """ return self._field_type @@ -113,7 +114,7 @@ def mode(self): """str: The mode of the field. See: - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#schema.fields.mode + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#TableFieldSchema.FIELDS.mode """ return self._mode @@ -139,8 +140,7 @@ def to_api_repr(self): """Return a dictionary representing this schema field. Returns: - dict: A dictionary representing the SchemaField in a serialized - form. + Dict: A dictionary representing the SchemaField in a serialized form. """ # Put together the basic representation. See http://bit.ly/2hOAT5u. answer = { @@ -164,8 +164,7 @@ def _key(self): Used to compute this instance's hashcode and evaluate equality. Returns: - tuple: The contents of this - :class:`~google.cloud.bigquery.schema.SchemaField`. + Tuple: The contents of this :class:`~google.cloud.bigquery.schema.SchemaField`. """ return ( self._name, @@ -229,11 +228,11 @@ def _parse_schema_resource(info): """Parse a resource fragment into a schema field. 
Args: - info: (Mapping[str->dict]): should contain a "fields" key to be parsed + info (Mapping[str, Dict]): should contain a "fields" key to be parsed Returns: - (Union[Sequence[:class:`google.cloud.bigquery.schema.SchemaField`],None]) - a list of parsed fields, or ``None`` if no "fields" key found. + Optional[Sequence[google.cloud.bigquery.schema.SchemaField]]: + A list of parsed fields, or ``None`` if no "fields" key found. """ if "fields" not in info: return () @@ -253,10 +252,42 @@ def _build_schema_resource(fields): """Generate a resource fragment for a schema. Args: - fields [Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`]): - schema to be dumped + fields (Sequence[google.cloud.bigquery.schema.SchemaField]): schema to be dumped. - Returns: (Sequence[dict]) - mappings describing the schema of the supplied fields. + Returns: + Sequence[Dict]: Mappings describing the schema of the supplied fields. """ return [field.to_api_repr() for field in fields] + + +def _to_schema_fields(schema): + """Coerce `schema` to a list of schema field instances. + + Args: + schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + Table schema to convert. If some items are passed as mappings, + their content must be compatible with + :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. + + Returns: + Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`] + + Raises: + Exception: If ``schema`` is not a sequence, or if any item in the + sequence is not a :class:`~google.cloud.bigquery.schema.SchemaField` + instance or a compatible mapping representation of the field. + """ + for field in schema: + if not isinstance(field, (SchemaField, collections.Mapping)): + raise ValueError( + "Schema items must either be fields or compatible " + "mapping representations." 
+ ) + + return [ + field if isinstance(field, SchemaField) else SchemaField.from_api_repr(field) + for field in schema + ] diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index 71fc9ef945d4..2f2ee50cc89e 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -51,10 +51,11 @@ import google.cloud._helpers from google.cloud.bigquery import _helpers from google.cloud.bigquery import _pandas_helpers -from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.schema import _build_schema_resource from google.cloud.bigquery.schema import _parse_schema_resource +from google.cloud.bigquery.schema import _to_schema_fields from google.cloud.bigquery.external_config import ExternalConfig +from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration _LOGGER = logging.getLogger(__name__) @@ -113,78 +114,11 @@ def _view_use_legacy_sql_getter(table): return True -class EncryptionConfiguration(object): - """Custom encryption configuration (e.g., Cloud KMS keys). - - Args: - kms_key_name (str): resource ID of Cloud KMS key used for encryption - """ - - def __init__(self, kms_key_name=None): - self._properties = {} - if kms_key_name is not None: - self._properties["kmsKeyName"] = kms_key_name - - @property - def kms_key_name(self): - """str: Resource ID of Cloud KMS key - - Resource ID of Cloud KMS key or :data:`None` if using default - encryption. - """ - return self._properties.get("kmsKeyName") - - @kms_key_name.setter - def kms_key_name(self, value): - self._properties["kmsKeyName"] = value - - @classmethod - def from_api_repr(cls, resource): - """Construct an encryption configuration from its API representation - - Args: - resource (Dict[str, object]): - An encryption configuration representation as returned from - the API. - - Returns: - google.cloud.bigquery.table.EncryptionConfiguration: - An encryption configuration parsed from ``resource``. 
- """ - config = cls() - config._properties = copy.deepcopy(resource) - return config - - def to_api_repr(self): - """Construct the API resource representation of this encryption - configuration. - - Returns: - Dict[str, object]: - Encryption configuration as represented as an API resource - """ - return copy.deepcopy(self._properties) - - def __eq__(self, other): - if not isinstance(other, EncryptionConfiguration): - return NotImplemented - return self.kms_key_name == other.kms_key_name - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash(self.kms_key_name) - - def __repr__(self): - return "EncryptionConfiguration({})".format(self.kms_key_name) - - class TableReference(object): """TableReferences are pointers to tables. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#tablereference Args: dataset_ref (google.cloud.bigquery.dataset.DatasetReference): @@ -364,18 +298,20 @@ class Table(object): """Tables represent a set of rows whose values correspond to a schema. See - https://cloud.google.com/bigquery/docs/reference/rest/v2/tables + https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#resource-table Args: - table_ref (Union[ \ - :class:`~google.cloud.bigquery.table.TableReference`, \ - str, \ - ]): + table_ref (Union[google.cloud.bigquery.table.TableReference, str]): A pointer to a table. If ``table_ref`` is a string, it must included a project ID, dataset ID, and table ID, each separated by ``.``. - schema (List[google.cloud.bigquery.schema.SchemaField]): - The table's schema + schema (Optional[Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]]): + The table's schema. If any item is a mapping, its content must be + compatible with + :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. 
""" _PROPERTY_TO_API_FIELD = { @@ -388,6 +324,7 @@ class Table(object): "view_query": "view", "external_data_configuration": "externalDataConfiguration", "encryption_configuration": "encryptionConfiguration", + "require_partition_filter": "requirePartitionFilter", } def __init__(self, table_ref, schema=None): @@ -423,15 +360,31 @@ def path(self): self.table_id, ) + @property + def require_partition_filter(self): + """bool: If set to true, queries over the partitioned table require a + partition filter that can be used for partition elimination to be + specified. + """ + return self._properties.get("requirePartitionFilter") + + @require_partition_filter.setter + def require_partition_filter(self, value): + self._properties["requirePartitionFilter"] = value + @property def schema(self): - """List[google.cloud.bigquery.schema.SchemaField]: Table's schema. + """Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]: + Table's schema. Raises: - TypeError: If 'value' is not a sequence - ValueError: - If any item in the sequence is not a - :class:`~google.cloud.bigquery.schema.SchemaField` + Exception: + If ``schema`` is not a sequence, or if any item in the sequence + is not a :class:`~google.cloud.bigquery.schema.SchemaField` + instance or a compatible mapping representation of the field. 
""" prop = self._properties.get("schema") if not prop: @@ -443,9 +396,8 @@ def schema(self): def schema(self, value): if value is None: self._properties["schema"] = None - elif not all(isinstance(field, SchemaField) for field in value): - raise ValueError("Schema items must be fields") else: + value = _to_schema_fields(value) self._properties["schema"] = {"fields": _build_schema_resource(value)} @property @@ -469,7 +421,7 @@ def labels(self, value): @property def encryption_configuration(self): - """google.cloud.bigquery.table.EncryptionConfiguration: Custom + """google.cloud.bigquery.encryption_configuration.EncryptionConfiguration: Custom encryption configuration for the table. Custom encryption configuration (e.g., Cloud KMS keys) or :data:`None` @@ -561,14 +513,54 @@ def table_type(self): """ return self._properties.get("type") + @property + def range_partitioning(self): + """Optional[google.cloud.bigquery.table.RangePartitioning]: + Configures range-based partitioning for a table. + + .. note:: + **Beta**. The integer range partitioning feature is in a + pre-release state and might change or have limited support. + + Only specify at most one of + :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or + :attr:`~google.cloud.bigquery.table.Table.range_partitioning`. + + Raises: + ValueError: + If the value is not + :class:`~google.cloud.bigquery.table.RangePartitioning` or + :data:`None`. 
+ """ + resource = self._properties.get("rangePartitioning") + if resource is not None: + return RangePartitioning(_properties=resource) + + @range_partitioning.setter + def range_partitioning(self, value): + resource = value + if isinstance(value, RangePartitioning): + resource = value._properties + elif value is not None: + raise ValueError( + "Expected value to be RangePartitioning or None, got {}.".format(value) + ) + self._properties["rangePartitioning"] = resource + @property def time_partitioning(self): - """google.cloud.bigquery.table.TimePartitioning: Configures time-based + """Optional[google.cloud.bigquery.table.TimePartitioning]: Configures time-based partitioning for a table. + Only specify at most one of + :attr:`~google.cloud.bigquery.table.Table.time_partitioning` or + :attr:`~google.cloud.bigquery.table.Table.range_partitioning`. + Raises: ValueError: - If the value is not :class:`TimePartitioning` or :data:`None`. + If the value is not + :class:`~google.cloud.bigquery.table.TimePartitioning` or + :data:`None`. """ prop = self._properties.get("timePartitioning") if prop is not None: @@ -1300,6 +1292,13 @@ class RowIterator(HTTPIterator): api_request (Callable[google.cloud._http.JSONConnection.api_request]): The function to use to make API requests. path (str): The method path to query for the list of items. + schema (Sequence[Union[ \ + :class:`~google.cloud.bigquery.schema.SchemaField`, \ + Mapping[str, Any] \ + ]]): + The table's schema. If any item is a mapping, its content must be + compatible with + :meth:`~google.cloud.bigquery.schema.SchemaField.from_api_repr`. page_token (str): A token identifying a page in a result set to start fetching results from. max_results (int, optional): The maximum number of results to fetch. @@ -1309,14 +1308,12 @@ class RowIterator(HTTPIterator): extra_params (Dict[str, object]): Extra query string parameters for the API call. 
table (Union[ \ - :class:`~google.cloud.bigquery.table.Table`, \ - :class:`~google.cloud.bigquery.table.TableReference`, \ + google.cloud.bigquery.table.Table, \ + google.cloud.bigquery.table.TableReference, \ ]): Optional. The table which these rows belong to, or a reference to it. Used to call the BigQuery Storage API to fetch rows. - selected_fields (Sequence[ \ - google.cloud.bigquery.schema.SchemaField, \ - ]): + selected_fields (Sequence[google.cloud.bigquery.schema.SchemaField]): Optional. A subset of columns to select from this table. """ @@ -1346,6 +1343,7 @@ def __init__( page_start=_rows_page_start, next_token="pageToken", ) + schema = _to_schema_fields(schema) self._field_to_index = _helpers._field_to_index_mapping(schema) self._page_size = page_size self._preserve_order = False @@ -1481,9 +1479,7 @@ def to_arrow(self, progress_bar_type=None, bqstorage_client=None): ``'tqdm_gui'`` Use the :func:`tqdm.tqdm_gui` function to display a progress bar as a graphical dialog box. - bqstorage_client ( \ - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ - ): + bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): **Beta Feature** Optional. A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. @@ -1501,8 +1497,7 @@ def to_arrow(self, progress_bar_type=None, bqstorage_client=None): from the destination table's schema. Raises: - ValueError: - If the :mod:`pyarrow` library cannot be imported. + ValueError: If the :mod:`pyarrow` library cannot be imported. ..versionadded:: 1.17.0 """ @@ -1567,9 +1562,7 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non """Create a pandas DataFrame by loading all pages of a query. Args: - bqstorage_client ( \ - google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient \ - ): + bqstorage_client (google.cloud.bigquery_storage_v1beta1.BigQueryStorageClient): **Beta Feature** Optional. 
A BigQuery Storage API client. If supplied, use the faster BigQuery Storage API to fetch rows from BigQuery. This API is a billable API. @@ -1584,9 +1577,7 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non query result tables with the BQ Storage API. When a problem is encountered reading a table, the tabledata.list method from the BigQuery API is used, instead. - dtypes ( \ - Map[str, Union[str, pandas.Series.dtype]] \ - ): + dtypes (Map[str, Union[str, pandas.Series.dtype]]): Optional. A dictionary of column names pandas ``dtype``s. The provided ``dtype`` is used when constructing the series for the column specified. Otherwise, the default pandas behavior @@ -1680,12 +1671,10 @@ def to_arrow(self, progress_bar_type=None): """[Beta] Create an empty class:`pyarrow.Table`. Args: - progress_bar_type (Optional[str]): - Ignored. Added for compatibility with RowIterator. + progress_bar_type (Optional[str]): Ignored. Added for compatibility with RowIterator. Returns: - pyarrow.Table: - An empty :class:`pyarrow.Table`. + pyarrow.Table: An empty :class:`pyarrow.Table`. """ if pyarrow is None: raise ValueError(_NO_PYARROW_ERROR) @@ -1695,16 +1684,12 @@ def to_dataframe(self, bqstorage_client=None, dtypes=None, progress_bar_type=Non """Create an empty dataframe. Args: - bqstorage_client (Any): - Ignored. Added for compatibility with RowIterator. - dtypes (Any): - Ignored. Added for compatibility with RowIterator. - progress_bar_type (Any): - Ignored. Added for compatibility with RowIterator. + bqstorage_client (Any): Ignored. Added for compatibility with RowIterator. + dtypes (Any): Ignored. Added for compatibility with RowIterator. + progress_bar_type (Any): Ignored. Added for compatibility with RowIterator. Returns: - pandas.DataFrame: - An empty :class:`~pandas.DataFrame`. + pandas.DataFrame: An empty :class:`~pandas.DataFrame`. 
""" if pandas is None: raise ValueError(_NO_PANDAS_ERROR) @@ -1714,6 +1699,147 @@ def __iter__(self): return iter(()) +class PartitionRange(object): + """Definition of the ranges for range partitioning. + + .. note:: + **Beta**. The integer range partitioning feature is in a pre-release + state and might change or have limited support. + + Args: + start (Optional[int]): + Sets the + :attr:`~google.cloud.bigquery.table.PartitionRange.start` + property. + end (Optional[int]): + Sets the + :attr:`~google.cloud.bigquery.table.PartitionRange.end` + property. + interval (Optional[int]): + Sets the + :attr:`~google.cloud.bigquery.table.PartitionRange.interval` + property. + _properties (Optional[dict]): + Private. Used to construct object from API resource. + """ + + def __init__(self, start=None, end=None, interval=None, _properties=None): + if _properties is None: + _properties = {} + self._properties = _properties + + if start is not None: + self.start = start + if end is not None: + self.end = end + if interval is not None: + self.interval = interval + + @property + def start(self): + """int: The start of range partitioning, inclusive.""" + return _helpers._int_or_none(self._properties.get("start")) + + @start.setter + def start(self, value): + self._properties["start"] = _helpers._str_or_none(value) + + @property + def end(self): + """int: The end of range partitioning, exclusive.""" + return _helpers._int_or_none(self._properties.get("end")) + + @end.setter + def end(self, value): + self._properties["end"] = _helpers._str_or_none(value) + + @property + def interval(self): + """int: The width of each interval.""" + return _helpers._int_or_none(self._properties.get("interval")) + + @interval.setter + def interval(self, value): + self._properties["interval"] = _helpers._str_or_none(value) + + def _key(self): + return tuple(sorted(self._properties.items())) + + def __repr__(self): + key_vals = ["{}={}".format(key, val) for key, val in self._key()] + return 
"PartitionRange({})".format(", ".join(key_vals)) + + +class RangePartitioning(object): + """Range-based partitioning configuration for a table. + + .. note:: + **Beta**. The integer range partitioning feature is in a pre-release + state and might change or have limited support. + + Args: + range_ (Optional[google.cloud.bigquery.table.PartitionRange]): + Sets the + :attr:`google.cloud.bigquery.table.RangePartitioning.range_` + property. + field (Optional[str]): + Sets the + :attr:`google.cloud.bigquery.table.RangePartitioning.field` + property. + _properties (Optional[dict]): + Private. Used to construct object from API resource. + """ + + def __init__(self, range_=None, field=None, _properties=None): + if _properties is None: + _properties = {} + self._properties = _properties + + if range_ is not None: + self.range_ = range_ + if field is not None: + self.field = field + + # Trailing underscore to prevent conflict with built-in range() function. + @property + def range_(self): + """google.cloud.bigquery.table.PartitionRange: Defines the + ranges for range partitioning. + + Raises: + ValueError: + If the value is not a :class:`PartitionRange`. + """ + range_properties = self._properties.setdefault("range", {}) + return PartitionRange(_properties=range_properties) + + @range_.setter + def range_(self, value): + if not isinstance(value, PartitionRange): + raise ValueError("Expected a PartitionRange, but got {}.".format(value)) + self._properties["range"] = value._properties + + @property + def field(self): + """str: The table is partitioned by this field. + + The field must be a top-level ``NULLABLE`` / ``REQUIRED`` field. The + only supported type is ``INTEGER`` / ``INT64``. 
+ """ + return self._properties.get("field") + + @field.setter + def field(self, value): + self._properties["field"] = value + + def _key(self): + return (("field", self.field), ("range_", self.range_)) + + def __repr__(self): + key_vals = ["{}={}".format(key, repr(val)) for key, val in self._key()] + return "RangePartitioning({})".format(", ".join(key_vals)) + + class TimePartitioningType(object): """Specifies the type of time partitioning to perform.""" @@ -1738,9 +1864,9 @@ class TimePartitioning(object): Number of milliseconds for which to keep the storage for a partition. require_partition_filter (bool, optional): - If set to true, queries over the partitioned table require a - partition filter that can be used for partition elimination to be - specified. + DEPRECATED: Use + :attr:`~google.cloud.bigquery.table.Table.require_partition_filter`, + instead. """ def __init__( @@ -1793,11 +1919,33 @@ def expiration_ms(self, value): @property def require_partition_filter(self): """bool: Specifies whether partition filters are required for queries + + DEPRECATED: Use + :attr:`~google.cloud.bigquery.table.Table.require_partition_filter`, + instead. """ + warnings.warn( + ( + "TimePartitioning.require_partition_filter will be removed in " + "future versions. Please use Table.require_partition_filter " + "instead." + ), + PendingDeprecationWarning, + stacklevel=2, + ) return self._properties.get("requirePartitionFilter") @require_partition_filter.setter def require_partition_filter(self, value): + warnings.warn( + ( + "TimePartitioning.require_partition_filter will be removed in " + "future versions. Please use Table.require_partition_filter " + "instead." + ), + PendingDeprecationWarning, + stacklevel=2, + ) self._properties["requirePartitionFilter"] = value @classmethod @@ -1872,14 +2020,12 @@ def _item_to_row(iterator, resource): added to the iterator after being created, which should be done by the caller. 
- :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: An item to be converted to a row. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. + resource (Dict): An item to be converted to a row. - :rtype: :class:`~google.cloud.bigquery.table.Row` - :returns: The next row in the page. + Returns: + google.cloud.bigquery.table.Row: The next row in the page. """ return Row( _helpers._row_tuple_from_json(resource, iterator.schema), @@ -1910,14 +2056,10 @@ def get_column_data(field_index, field): def _rows_page_start(iterator, page, response): """Grab total rows when :class:`~google.cloud.iterator.Page` starts. - :type iterator: :class:`~google.api_core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type page: :class:`~google.api_core.page_iterator.Page` - :param page: The page that was just created. - - :type response: dict - :param response: The JSON API response for a page of rows in a table. + Args: + iterator (google.api_core.page_iterator.Iterator): The iterator that is currently in use. + page (google.api_core.page_iterator.Page): The page that was just created. + response (Dict): The JSON API response for a page of rows in a table. """ # Make a (lazy) copy of the page in column-oriented format for use in data # science packages. 
diff --git a/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py index b04cc3d58e9c..f7b26be5547f 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py +++ b/bigquery/google/cloud/bigquery_v2/proto/encryption_config_pb2.py @@ -62,7 +62,7 @@ extension_scope=None, serialized_options=_b("\340A\001"), file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py index 98dfa4b1a22c..3994660ec46d 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py +++ b/bigquery/google/cloud/bigquery_v2/proto/model_pb2.py @@ -290,7 +290,7 @@ fields=[], extensions=[], nested_types=[], - enum_types=[_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD], + enum_types=[_MODEL_KMEANSENUMS_KMEANSINITIALIZATIONMETHOD,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -819,7 +819,7 @@ ), ], extensions=[], - nested_types=[_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX], + nested_types=[_MODEL_BINARYCLASSIFICATIONMETRICS_BINARYCONFUSIONMATRIX,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1046,7 +1046,7 @@ ), ], extensions=[], - nested_types=[_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX], + nested_types=[_MODEL_MULTICLASSCLASSIFICATIONMETRICS_CONFUSIONMATRIX,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1137,11 +1137,11 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[ - _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT + _MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE_CATEGORYCOUNT, ], enum_types=[], serialized_options=None, @@ -1216,7 +1216,7 @@ ), ], extensions=[], - nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE], + 
nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE_CATEGORICALVALUE,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1229,7 +1229,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=3759, serialized_end=4209, @@ -1298,7 +1298,7 @@ ), ], extensions=[], - nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE], + nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER_FEATUREVALUE,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1372,7 +1372,7 @@ ), ], extensions=[], - nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER], + nested_types=[_MODEL_CLUSTERINGMETRICS_CLUSTER,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1477,7 +1477,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=4212, serialized_end=4617, @@ -1926,7 +1926,7 @@ ), ], extensions=[], - nested_types=[_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY], + nested_types=[_MODEL_TRAININGRUN_TRAININGOPTIONS_LABELCLASSWEIGHTSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -2128,7 +2128,7 @@ ), ], extensions=[], - nested_types=[_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO], + nested_types=[_MODEL_TRAININGRUN_ITERATIONRESULT_CLUSTERINFO,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py b/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py index 19ca829a4061..3b394b8bf10e 100644 --- a/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py +++ b/bigquery/google/cloud/bigquery_v2/proto/standard_sql_pb2.py @@ -161,7 +161,7 @@ ], extensions=[], nested_types=[], - enum_types=[_STANDARDSQLDATATYPE_TYPEKIND], + enum_types=[_STANDARDSQLDATATYPE_TYPEKIND,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -173,7 +173,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=143, serialized_end=602, @@ -261,7 +261,7 @@ extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/bigquery/google/cloud/bigquery_v2/types.py b/bigquery/google/cloud/bigquery_v2/types.py index da9287c07824..ee852364a10f 100644 --- a/bigquery/google/cloud/bigquery_v2/types.py +++ b/bigquery/google/cloud/bigquery_v2/types.py @@ -29,7 +29,11 @@ from google.protobuf import wrappers_pb2 -_shared_modules = [empty_pb2, timestamp_pb2, wrappers_pb2] +_shared_modules = [ + empty_pb2, + timestamp_pb2, + wrappers_pb2, +] _local_modules = [ encryption_config_pb2, diff --git a/bigquery/noxfile.py b/bigquery/noxfile.py index 37611a5ce296..a6d8094ebbc3 100644 --- a/bigquery/noxfile.py +++ b/bigquery/noxfile.py @@ -20,11 +20,7 @@ import nox -LOCAL_DEPS = ( - os.path.join("..", "api_core[grpc]"), - os.path.join("..", "core"), - os.path.join("..", "test_utils"), -) +LOCAL_DEPS = (os.path.join("..", "api_core[grpc]"), os.path.join("..", "core")) BLACK_PATHS = ("docs", "google", "samples", "tests", "noxfile.py", "setup.py") @@ -42,6 +38,7 @@ def default(session): for local_dep in LOCAL_DEPS: session.install("-e", local_dep) + session.install("-e", os.path.join("..", "test_utils")) dev_install = ".[all]" session.install("-e", dev_install) @@ -150,6 +147,7 @@ def lint(session): session.install("-e", ".") session.run("flake8", os.path.join("google", "cloud", "bigquery")) session.run("flake8", "tests") + session.run("flake8", os.path.join("docs", "samples")) session.run("flake8", os.path.join("docs", "snippets.py")) session.run("black", "--check", *BLACK_PATHS) diff --git a/bigquery/samples/add_empty_column.py b/bigquery/samples/add_empty_column.py index eb84037598d3..bd531898eb29 100644 --- a/bigquery/samples/add_empty_column.py +++ b/bigquery/samples/add_empty_column.py @@ -21,17 +21,18 @@ def add_empty_column(client, table_id): # TODO(developer): Construct a BigQuery client object. 
# client = bigquery.Client() - # TODO(developer): Set table_id to the ID of the table to add an empty column. + # TODO(developer): Set table_id to the ID of the table + # to add an empty column. # table_id = "your-project.your_dataset.your_table_name" - table = client.get_table(table_id) + table = client.get_table(table_id) # Make an API request. original_schema = table.schema - new_schema = original_schema[:] # creates a copy of the schema + new_schema = original_schema[:] # Creates a copy of the schema. new_schema.append(bigquery.SchemaField("phone", "STRING")) table.schema = new_schema - table = client.update_table(table, ["schema"]) # API request + table = client.update_table(table, ["schema"]) # Make an API request. if len(table.schema) == len(original_schema) + 1 == len(new_schema): print("A new column has been added.") diff --git a/bigquery/samples/browse_table_data.py b/bigquery/samples/browse_table_data.py index dd6c572cab6d..78d1d351a7a7 100644 --- a/bigquery/samples/browse_table_data.py +++ b/bigquery/samples/browse_table_data.py @@ -26,7 +26,7 @@ def browse_table_data(client, table_id): # table_id = "your-project.your_dataset.your_table_name" # Download all rows from a table. - rows_iter = client.list_rows(table_id) + rows_iter = client.list_rows(table_id) # Make an API request. # Iterate over rows to make the API requests to fetch row data. rows = list(rows_iter) @@ -38,10 +38,18 @@ def browse_table_data(client, table_id): print("Downloaded {} rows from table {}".format(len(rows), table_id)) # Specify selected fields to limit the results to certain columns. - table = client.get_table(table_id) - fields = table.schema[:2] # first two columns + table = client.get_table(table_id) # Make an API request. + fields = table.schema[:2] # First two columns. 
rows_iter = client.list_rows(table_id, selected_fields=fields, max_results=10) rows = list(rows_iter) print("Selected {} columns from table {}.".format(len(rows_iter.schema), table_id)) print("Downloaded {} rows from table {}".format(len(rows), table_id)) + + # Print row data in tabular format. + rows = client.list_rows(table, max_results=10) + format_string = "{!s:<16} " * len(rows.schema) + field_names = [field.name for field in rows.schema] + print(format_string.format(*field_names)) # Prints column headers. + for row in rows: + print(format_string.format(*row)) # Prints row data. # [END bigquery_browse_table] diff --git a/bigquery/samples/client_list_jobs.py b/bigquery/samples/client_list_jobs.py new file mode 100644 index 000000000000..08eb4fbd99ef --- /dev/null +++ b/bigquery/samples/client_list_jobs.py @@ -0,0 +1,50 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_list_jobs(client): + + # [START bigquery_list_jobs] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + import datetime + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # List the 10 most recent jobs in reverse chronological order. + # Omit the max_results parameter to list jobs from the past 6 months. 
+ print("Last 10 jobs:") + for job in client.list_jobs(max_results=10): # API request(s) + print("{}".format(job.job_id)) + + # The following are examples of additional optional parameters: + + # Use min_creation_time and/or max_creation_time to specify a time window. + print("Jobs from the last ten minutes:") + ten_mins_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=10) + for job in client.list_jobs(min_creation_time=ten_mins_ago): + print("{}".format(job.job_id)) + + # Use all_users to include jobs run by all users in the project. + print("Last 10 jobs run by all users:") + for job in client.list_jobs(max_results=10, all_users=True): + print("{} run by user: {}".format(job.job_id, job.user_email)) + + # Use state_filter to filter by job state. + print("Last 10 jobs done:") + for job in client.list_jobs(max_results=10, state_filter="DONE"): + print("{}".format(job.job_id)) + # [END bigquery_list_jobs] diff --git a/bigquery/samples/client_query.py b/bigquery/samples/client_query.py new file mode 100644 index 000000000000..9dccfd38cbcf --- /dev/null +++ b/bigquery/samples/client_query.py @@ -0,0 +1,41 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def client_query(client): + + # [START bigquery_query] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. 
+ # client = bigquery.Client() + + query = """ + SELECT name, SUM(number) as total_people + FROM `bigquery-public-data.usa_names.usa_1910_2013` + WHERE state = 'TX' + GROUP BY name, state + ORDER BY total_people DESC + LIMIT 20 + """ + query_job = client.query( + query, location="US" # Must match the destination dataset(s) location. + ) # Make an API request. + + print("The query data:") + for row in query_job: + # Row values can be accessed by field name or index. + print("name={}, count={}".format(row[0], row["total_people"])) + # [END bigquery_query] diff --git a/bigquery/samples/copy_table.py b/bigquery/samples/copy_table.py new file mode 100644 index 000000000000..f6ebd91470eb --- /dev/null +++ b/bigquery/samples/copy_table.py @@ -0,0 +1,39 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def copy_table(client, source_table_id, destination_table_id): + + # [START bigquery_copy_table] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set source_table_id to the ID of the original table. + # source_table_id = "your-project.source_dataset.source_table" + + # TODO(developer): Set destination_table_id to the ID of the destination table. 
+ # destination_table_id = "your-project.destination_dataset.destination_table" + + job = client.copy_table( + source_table_id, + destination_table_id, + location="US", # Must match the source and destination tables location. + ) + job.result() # Waits for job to complete. + + print("A copy of the table created.") + # [END bigquery_copy_table] diff --git a/bigquery/samples/create_dataset.py b/bigquery/samples/create_dataset.py index 89ca9d38f5f3..3d64473a2321 100644 --- a/bigquery/samples/create_dataset.py +++ b/bigquery/samples/create_dataset.py @@ -33,6 +33,6 @@ def create_dataset(client, dataset_id): # Send the dataset to the API for creation. # Raises google.api_core.exceptions.Conflict if the Dataset already # exists within the project. - dataset = client.create_dataset(dataset) # API request + dataset = client.create_dataset(dataset) # Make an API request. print("Created dataset {}.{}".format(client.project, dataset.dataset_id)) # [END bigquery_create_dataset] diff --git a/bigquery/samples/create_job.py b/bigquery/samples/create_job.py index 24bb85510598..4f7f27a8e668 100644 --- a/bigquery/samples/create_job.py +++ b/bigquery/samples/create_job.py @@ -33,7 +33,7 @@ def create_job(client): # The client libraries automatically generate a job ID. Override the # generated ID with either the job_id_prefix or job_id parameters. job_id_prefix="code_sample_", - ) # API request + ) # Make an API request. print("Started job: {}".format(query_job.job_id)) # [END bigquery_create_job] diff --git a/bigquery/samples/create_routine.py b/bigquery/samples/create_routine.py index c08ec4799a3e..424ee4ef5553 100644 --- a/bigquery/samples/create_routine.py +++ b/bigquery/samples/create_routine.py @@ -40,7 +40,7 @@ def create_routine(client, routine_id): ], ) - routine = client.create_routine(routine) + routine = client.create_routine(routine) # Make an API request. 
print("Created routine {}".format(routine.reference)) # [END bigquery_create_routine] diff --git a/bigquery/samples/create_routine_ddl.py b/bigquery/samples/create_routine_ddl.py index a4ae3318e7b4..eb5af0388503 100644 --- a/bigquery/samples/create_routine_ddl.py +++ b/bigquery/samples/create_routine_ddl.py @@ -34,12 +34,8 @@ def create_routine_ddl(client, routine_id): """.format( routine_id ) - - # Initiate the query to create the routine. - query_job = client.query(sql) - - # Wait for the query to complete. - query_job.result() + query_job = client.query(sql) # Make an API request. + query_job.result() # Wait for the job to complete. print("Created routine {}".format(query_job.ddl_target_routine)) # [END bigquery_create_routine_ddl] diff --git a/bigquery/samples/create_table.py b/bigquery/samples/create_table.py index 2a6e98fc72f6..ae26c57fed00 100644 --- a/bigquery/samples/create_table.py +++ b/bigquery/samples/create_table.py @@ -21,7 +21,7 @@ def create_table(client, table_id): # TODO(developer): Construct a BigQuery client object. # client = bigquery.Client() - # TODO(developer): Set table_id to the ID of the table to create + # TODO(developer): Set table_id to the ID of the table to create. # table_id = "your-project.your_dataset.your_table_name" schema = [ @@ -30,7 +30,7 @@ def create_table(client, table_id): ] table = bigquery.Table(table_id, schema=schema) - table = client.create_table(table) # API request + table = client.create_table(table) # Make an API request. print( "Created table {}.{}.{}".format(table.project, table.dataset_id, table.table_id) ) diff --git a/bigquery/samples/dataset_exists.py b/bigquery/samples/dataset_exists.py index 46cf26a623bf..b8b53b8a4580 100644 --- a/bigquery/samples/dataset_exists.py +++ b/bigquery/samples/dataset_exists.py @@ -22,7 +22,7 @@ def dataset_exists(client, dataset_id): # dataset_id = "your-project.your_dataset" try: - client.get_dataset(dataset_id) + client.get_dataset(dataset_id) # Make an API request. 
print("Dataset {} already exists".format(dataset_id)) except NotFound: print("Dataset {} is not found".format(dataset_id)) diff --git a/bigquery/samples/delete_dataset.py b/bigquery/samples/delete_dataset.py index 6cde1b6b2d27..8ce95d953392 100644 --- a/bigquery/samples/delete_dataset.py +++ b/bigquery/samples/delete_dataset.py @@ -25,9 +25,11 @@ def delete_dataset(client, dataset_id): # TODO(developer): Set model_id to the ID of the model to fetch. # dataset_id = 'your-project.your_dataset' - # Use the delete_contents parameter to delete a dataset and its contents + # Use the delete_contents parameter to delete a dataset and its contents. # Use the not_found_ok parameter to not receive an error if the dataset has already been deleted. - client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True) + client.delete_dataset( + dataset_id, delete_contents=True, not_found_ok=True + ) # Make an API request. print("Deleted dataset '{}'.".format(dataset_id)) # [END bigquery_delete_dataset] diff --git a/bigquery/samples/delete_dataset_labels.py b/bigquery/samples/delete_dataset_labels.py index 33ff5c0f2620..9e6493694ddc 100644 --- a/bigquery/samples/delete_dataset_labels.py +++ b/bigquery/samples/delete_dataset_labels.py @@ -25,12 +25,12 @@ def delete_dataset_labels(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. - # To delete a label from a dataset, set its value to None + # To delete a label from a dataset, set its value to None. dataset.labels["color"] = None - dataset = client.update_dataset(dataset, ["labels"]) + dataset = client.update_dataset(dataset, ["labels"]) # Make an API request. 
print("Labels deleted from {}".format(dataset_id)) # [END bigquery_delete_label_dataset] return dataset diff --git a/bigquery/samples/delete_model.py b/bigquery/samples/delete_model.py index 5ac4305bc97e..b6f32a59ebd9 100644 --- a/bigquery/samples/delete_model.py +++ b/bigquery/samples/delete_model.py @@ -26,7 +26,7 @@ def delete_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - client.delete_model(model_id) + client.delete_model(model_id) # Make an API request. print("Deleted model '{}'.".format(model_id)) # [END bigquery_delete_model] diff --git a/bigquery/samples/delete_routine.py b/bigquery/samples/delete_routine.py index c0164b415008..c20b49837b75 100644 --- a/bigquery/samples/delete_routine.py +++ b/bigquery/samples/delete_routine.py @@ -25,7 +25,7 @@ def delete_routine(client, routine_id): # TODO(developer): Set the fully-qualified ID for the routine. # routine_id = "my-project.my_dataset.my_routine" - client.delete_routine(routine_id) + client.delete_routine(routine_id) # Make an API request. print("Deleted routine {}.".format(routine_id)) # [END bigquery_delete_routine] diff --git a/bigquery/samples/delete_table.py b/bigquery/samples/delete_table.py index dcdd3d855b2e..b83a92890b09 100644 --- a/bigquery/samples/delete_table.py +++ b/bigquery/samples/delete_table.py @@ -26,7 +26,7 @@ def delete_table(client, table_id): # table_id = 'your-project.your_dataset.your_table' # If the table does not exist, delete_table raises - # google.api_core.exceptions.NotFound unless not_found_ok is True - client.delete_table(table_id, not_found_ok=True) + # google.api_core.exceptions.NotFound unless not_found_ok is True. + client.delete_table(table_id, not_found_ok=True) # Make an API request. 
print("Deleted table '{}'.".format(table_id)) # [END bigquery_delete_table] diff --git a/bigquery/samples/get_dataset.py b/bigquery/samples/get_dataset.py index 5586c2b95ebb..bb3d4a0d4c40 100644 --- a/bigquery/samples/get_dataset.py +++ b/bigquery/samples/get_dataset.py @@ -25,7 +25,7 @@ def get_dataset(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) friendly_name = dataset.friendly_name @@ -35,7 +35,7 @@ def get_dataset(client, dataset_id): ) ) - # View dataset properties + # View dataset properties. print("Description: {}".format(dataset.description)) print("Labels:") labels = dataset.labels @@ -45,9 +45,9 @@ def get_dataset(client, dataset_id): else: print("\tDataset has no labels defined.") - # View tables in dataset + # View tables in dataset. print("Tables:") - tables = list(client.list_tables(dataset)) # API request(s) + tables = list(client.list_tables(dataset)) # Make an API request(s). if tables: for table in tables: print("\t{}".format(table.table_id)) diff --git a/bigquery/samples/get_dataset_labels.py b/bigquery/samples/get_dataset_labels.py index 2f21723a550b..411607f84664 100644 --- a/bigquery/samples/get_dataset_labels.py +++ b/bigquery/samples/get_dataset_labels.py @@ -25,9 +25,9 @@ def get_dataset_labels(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. - # View dataset labels + # View dataset labels. 
print("Dataset ID: {}".format(dataset_id)) print("Labels:") if dataset.labels: diff --git a/bigquery/samples/get_model.py b/bigquery/samples/get_model.py index 69986733c50b..0ebd59c9d067 100644 --- a/bigquery/samples/get_model.py +++ b/bigquery/samples/get_model.py @@ -26,7 +26,7 @@ def get_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - model = client.get_model(model_id) + model = client.get_model(model_id) # Make an API request. full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id) friendly_name = model.friendly_name diff --git a/bigquery/samples/get_routine.py b/bigquery/samples/get_routine.py index d9035c282438..da4e89f57f19 100644 --- a/bigquery/samples/get_routine.py +++ b/bigquery/samples/get_routine.py @@ -25,15 +25,15 @@ def get_routine(client, routine_id): # TODO(developer): Set the fully-qualified ID for the routine. # routine_id = "my-project.my_dataset.my_routine" - routine = client.get_routine(routine_id) + routine = client.get_routine(routine_id) # Make an API request. 
- print("Routine `{}`:".format(routine.reference)) - print(" Type: '{}'".format(routine.type_)) - print(" Language: '{}'".format(routine.language)) - print(" Arguments:") + print("Routine '{}':".format(routine.reference)) + print("\tType: '{}'".format(routine.type_)) + print("\tLanguage: '{}'".format(routine.language)) + print("\tArguments:") for argument in routine.arguments: - print(" Name: '{}'".format(argument.name)) - print(" Type: '{}'".format(argument.type_)) + print("\t\tName: '{}'".format(argument.name)) + print("\t\tType: '{}'".format(argument.data_type)) # [END bigquery_get_routine] return routine diff --git a/bigquery/samples/get_table.py b/bigquery/samples/get_table.py index e49e032f6e23..201b8808a846 100644 --- a/bigquery/samples/get_table.py +++ b/bigquery/samples/get_table.py @@ -25,13 +25,12 @@ def get_table(client, table_id): # TODO(developer): Set table_id to the ID of the model to fetch. # table_id = 'your-project.your_dataset.your_table' - table = client.get_table(table_id) + table = client.get_table(table_id) # Make an API request. + # View table properties print( "Got table '{}.{}.{}'.".format(table.project, table.dataset_id, table.table_id) ) - - # View table properties print("Table schema: {}".format(table.schema)) print("Table description: {}".format(table.description)) print("Table has {} rows".format(table.num_rows)) diff --git a/bigquery/samples/label_dataset.py b/bigquery/samples/label_dataset.py index 7840ea25a63f..019b2aa374a0 100644 --- a/bigquery/samples/label_dataset.py +++ b/bigquery/samples/label_dataset.py @@ -25,9 +25,9 @@ def label_dataset(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = "your-project.your_dataset" - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. 
dataset.labels = {"color": "green"} - dataset = client.update_dataset(dataset, ["labels"]) + dataset = client.update_dataset(dataset, ["labels"]) # Make an API request. print("Labels added to {}".format(dataset_id)) # [END bigquery_label_dataset] diff --git a/bigquery/samples/list_datasets.py b/bigquery/samples/list_datasets.py index b57aad1b5e7b..77ae8c785d22 100644 --- a/bigquery/samples/list_datasets.py +++ b/bigquery/samples/list_datasets.py @@ -22,12 +22,12 @@ def list_datasets(client): # TODO(developer): Construct a BigQuery client object. # client = bigquery.Client() - datasets = list(client.list_datasets()) + datasets = list(client.list_datasets()) # Make an API request. project = client.project if datasets: print("Datasets in project {}:".format(project)) - for dataset in datasets: # API request(s) + for dataset in datasets: print("\t{}".format(dataset.dataset_id)) else: print("{} project does not contain any datasets.".format(project)) diff --git a/bigquery/samples/list_datasets_by_label.py b/bigquery/samples/list_datasets_by_label.py index 8b574b1110eb..9fa939ad0c19 100644 --- a/bigquery/samples/list_datasets_by_label.py +++ b/bigquery/samples/list_datasets_by_label.py @@ -23,7 +23,7 @@ def list_datasets_by_label(client): # client = bigquery.Client() label_filter = "labels.color:green" - datasets = list(client.list_datasets(filter=label_filter)) + datasets = list(client.list_datasets(filter=label_filter)) # Make an API request. if datasets: print("Datasets filtered by {}:".format(label_filter)) diff --git a/bigquery/samples/list_models.py b/bigquery/samples/list_models.py index 5b4d21799b28..a2477ffc795b 100644 --- a/bigquery/samples/list_models.py +++ b/bigquery/samples/list_models.py @@ -27,7 +27,7 @@ def list_models(client, dataset_id): # the models you are listing. # dataset_id = 'your-project.your_dataset' - models = client.list_models(dataset_id) + models = client.list_models(dataset_id) # Make an API request. 
print("Models contained in '{}':".format(dataset_id)) for model in models: diff --git a/bigquery/samples/list_routines.py b/bigquery/samples/list_routines.py index 1ae4f441cde1..5eaad0cec8f4 100644 --- a/bigquery/samples/list_routines.py +++ b/bigquery/samples/list_routines.py @@ -26,7 +26,7 @@ def list_routines(client, dataset_id): # the routines you are listing. # dataset_id = 'your-project.your_dataset' - routines = client.list_routines(dataset_id) + routines = client.list_routines(dataset_id) # Make an API request. print("Routines contained in dataset {}:".format(dataset_id)) for routine in routines: diff --git a/bigquery/samples/list_tables.py b/bigquery/samples/list_tables.py index 2057f2d73891..d7576616e191 100644 --- a/bigquery/samples/list_tables.py +++ b/bigquery/samples/list_tables.py @@ -26,7 +26,7 @@ def list_tables(client, dataset_id): # the tables you are listing. # dataset_id = 'your-project.your_dataset' - tables = client.list_tables(dataset_id) + tables = client.list_tables(dataset_id) # Make an API request. print("Tables contained in '{}':".format(dataset_id)) for table in tables: diff --git a/bigquery/samples/load_table_dataframe.py b/bigquery/samples/load_table_dataframe.py index 69eeb6ef89d0..ea6fe5d02384 100644 --- a/bigquery/samples/load_table_dataframe.py +++ b/bigquery/samples/load_table_dataframe.py @@ -14,8 +14,10 @@ def load_table_dataframe(client, table_id): + # [START bigquery_load_table_dataframe] from google.cloud import bigquery + import pandas # TODO(developer): Construct a BigQuery client object. @@ -59,11 +61,14 @@ def load_table_dataframe(client, table_id): ) job = client.load_table_from_dataframe( - dataframe, table_id, job_config=job_config, location="US" - ) - job.result() # Waits for table load to complete. + dataframe, + table_id, + job_config=job_config, + location="US", # Must match the destination dataset location. + ) # Make an API request. + job.result() # Wait for the job to complete. 
- table = client.get_table(table_id) + table = client.get_table(table_id) # Make an API request. print( "Loaded {} rows and {} columns to {}".format( table.num_rows, len(table.schema), table_id diff --git a/bigquery/samples/query_external_sheets_permanent_table.py b/bigquery/samples/query_external_sheets_permanent_table.py new file mode 100644 index 000000000000..ce9b1c928782 --- /dev/null +++ b/bigquery/samples/query_external_sheets_permanent_table.py @@ -0,0 +1,73 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def query_external_sheets_permanent_table(dataset_id): + + # [START bigquery_query_external_sheets_perm] + from google.cloud import bigquery + import google.auth + + # Create credentials with Drive & BigQuery API scopes. + # Both APIs must be enabled for your project before running this code. + credentials, project = google.auth.default( + scopes=[ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/bigquery", + ] + ) + + # TODO(developer): Construct a BigQuery client object. + client = bigquery.Client(credentials=credentials, project=project) + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. + # dataset_id = "your-project.your_dataset" + + # Configure the external data source. 
+ dataset = client.get_dataset(dataset_id) + table_id = "us_states" + schema = [ + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), + ] + table = bigquery.Table(dataset.table(table_id), schema=schema) + external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") + # Use a shareable link or grant viewing access to the email address you + # used to authenticate with BigQuery (this example Sheet is public). + sheet_url = ( + "https://docs.google.com/spreadsheets" + "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" + ) + external_config.source_uris = [sheet_url] + external_config.options.skip_leading_rows = 1 # Optionally skip header row. + external_config.options.range = ( + "us-states!A20:B49" # Optionally set range of the sheet to query from. + ) + table.external_data_configuration = external_config + + # Create a permanent table linked to the Sheets file. + table = client.create_table(table) # Make an API request. + + # Example query to find states starting with "W". + sql = 'SELECT * FROM `{}.{}` WHERE name LIKE "W%"'.format(dataset_id, table_id) + query_job = client.query(sql) # Make an API request. + + # Wait for the query to complete. + w_states = list(query_job) + print( + "There are {} states with names starting with W in the selected range.".format( + len(w_states) + ) + ) + # [END bigquery_query_external_sheets_perm] diff --git a/bigquery/samples/query_external_sheets_temporary_table.py b/bigquery/samples/query_external_sheets_temporary_table.py new file mode 100644 index 000000000000..e89b6efab362 --- /dev/null +++ b/bigquery/samples/query_external_sheets_temporary_table.py @@ -0,0 +1,69 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def query_external_sheets_temporary_table(): + + # [START bigquery_query_external_sheets_temp] + # [START bigquery_auth_drive_scope] + from google.cloud import bigquery + import google.auth + + # Create credentials with Drive & BigQuery API scopes. + # Both APIs must be enabled for your project before running this code. + credentials, project = google.auth.default( + scopes=[ + "https://www.googleapis.com/auth/drive", + "https://www.googleapis.com/auth/bigquery", + ] + ) + + # TODO(developer): Construct a BigQuery client object. + client = bigquery.Client(credentials=credentials, project=project) + # [END bigquery_auth_drive_scope] + + # Configure the external data source and query job. + external_config = bigquery.ExternalConfig("GOOGLE_SHEETS") + + # Use a shareable link or grant viewing access to the email address you + # used to authenticate with BigQuery (this example Sheet is public). + sheet_url = ( + "https://docs.google.com/spreadsheets" + "/d/1i_QCL-7HcSyUZmIbP9E6lO_T5u3HnpLe7dnpHaijg_E/edit?usp=sharing" + ) + external_config.source_uris = [sheet_url] + external_config.schema = [ + bigquery.SchemaField("name", "STRING"), + bigquery.SchemaField("post_abbr", "STRING"), + ] + external_config.options.skip_leading_rows = 1 # Optionally skip header row. + external_config.options.range = ( + "us-states!A20:B49" # Optionally set range of the sheet to query from. + ) + table_id = "us_states" + job_config = bigquery.QueryJobConfig() + job_config.table_definitions = {table_id: external_config} + + # Example query to find states starting with "W". 
+ sql = 'SELECT * FROM `{}` WHERE name LIKE "W%"'.format(table_id) + query_job = client.query(sql, job_config=job_config) # Make an API request. + + # Wait for the query to complete. + w_states = list(query_job) + print( + "There are {} states with names starting with W in the selected range.".format( + len(w_states) + ) + ) + # [END bigquery_query_external_sheets_temp] diff --git a/bigquery/samples/query_script.py b/bigquery/samples/query_script.py new file mode 100644 index 000000000000..453b7c6f9435 --- /dev/null +++ b/bigquery/samples/query_script.py @@ -0,0 +1,69 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def query_script(client): + # [START bigquery_query_script] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # Run a SQL script. + sql_script = """ + -- Declare a variable to hold names as an array. + DECLARE top_names ARRAY<STRING>; + + -- Build an array of the top 100 names from the year 2000. + SET top_names = ( + SELECT ARRAY_AGG(name ORDER BY number DESC LIMIT 100) + FROM `bigquery-public-data.usa_names.usa_1910_2013` + WHERE year = 2000 + ); + + -- Which names appear as words in Shakespeare's plays? 
+ SELECT + name AS shakespeare_name + FROM UNNEST(top_names) AS name + WHERE name IN ( + SELECT word + FROM `bigquery-public-data.samples.shakespeare` + ); + """ + parent_job = client.query(sql_script) + + # Wait for the whole script to finish. + rows_iterable = parent_job.result() + print("Script created {} child jobs.".format(parent_job.num_child_jobs)) + + # Fetch result rows for the final sub-job in the script. + rows = list(rows_iterable) + print( + "{} of the top 100 names from year 2000 also appear in Shakespeare's works.".format( + len(rows) + ) + ) + + # Fetch jobs created by the SQL script. + child_jobs_iterable = client.list_jobs(parent_job=parent_job) + for child_job in child_jobs_iterable: + child_rows = list(child_job.result()) + print( + "Child job with ID {} produced {} row(s).".format( + child_job.job_id, len(child_rows) + ) + ) + + # [END bigquery_query_script] diff --git a/bigquery/samples/query_to_arrow.py b/bigquery/samples/query_to_arrow.py index b13dcf3e1413..4cc69d4e902a 100644 --- a/bigquery/samples/query_to_arrow.py +++ b/bigquery/samples/query_to_arrow.py @@ -41,7 +41,7 @@ def query_to_arrow(client): CROSS JOIN UNNEST(r.participants) as participant; """ query_job = client.query(sql) - arrow_table = query_job.to_arrow() + arrow_table = query_job.to_arrow() # Make an API request. print( "Downloaded {} rows, {} columns.".format( diff --git a/bigquery/samples/table_exists.py b/bigquery/samples/table_exists.py new file mode 100644 index 000000000000..a011e6e2915d --- /dev/null +++ b/bigquery/samples/table_exists.py @@ -0,0 +1,29 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def table_exists(client, table_id): + + # [START bigquery_table_exists] + from google.cloud.exceptions import NotFound + + # TODO(developer): Set table_id to the ID of the table to determine existence. + # table_id = "your-project.your_dataset.your_table" + + try: + client.get_table(table_id) # Make an API request. + print("Table {} already exists.".format(table_id)) + except NotFound: + print("Table {} is not found.".format(table_id)) + # [END bigquery_table_exists] diff --git a/bigquery/samples/table_insert_rows.py b/bigquery/samples/table_insert_rows.py new file mode 100644 index 000000000000..e2f949b635a6 --- /dev/null +++ b/bigquery/samples/table_insert_rows.py @@ -0,0 +1,34 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def table_insert_rows(client, table_id): + + # [START bigquery_table_insert_rows] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. 
+ # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the table to append to. + # table_id = "your-project.your_dataset.your_table" + + table = client.get_table(table_id) # Make an API request. + rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] + + errors = client.insert_rows(table, rows_to_insert) # Make an API request. + if errors == []: + print("New rows have been added.") + # [END bigquery_table_insert_rows] diff --git a/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py new file mode 100644 index 000000000000..953e7e210312 --- /dev/null +++ b/bigquery/samples/table_insert_rows_explicit_none_insert_ids.py @@ -0,0 +1,36 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def table_insert_rows_explicit_none_insert_ids(client, table_id):
 + + # [START bigquery_table_insert_rows_explicit_none_insert_ids] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the table to append to. + # table_id = "your-project.your_dataset.your_table" + + table = client.get_table(table_id) # Make an API request. 
+ rows_to_insert = [(u"Phred Phlyntstone", 32), (u"Wylma Phlyntstone", 29)] + + errors = client.insert_rows( + table, rows_to_insert, row_ids=[None] * len(rows_to_insert) + ) # Make an API request. + if errors == []: + print("New rows have been added.") + # [END bigquery_table_insert_rows_explicit_none_insert_ids] diff --git a/bigquery/samples/tests/conftest.py b/bigquery/samples/tests/conftest.py index f2bb93112a22..32b23931aa91 100644 --- a/bigquery/samples/tests/conftest.py +++ b/bigquery/samples/tests/conftest.py @@ -57,7 +57,7 @@ def random_routine_id(client, dataset_id): @pytest.fixture def dataset_id(client): now = datetime.datetime.now() - dataset_id = "python_samples_{}_{}".format( + dataset_id = "python_dataset_sample_{}_{}".format( now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) dataset = client.create_dataset(dataset_id) @@ -68,7 +68,7 @@ def dataset_id(client): @pytest.fixture def table_id(client, dataset_id): now = datetime.datetime.now() - table_id = "python_samples_{}_{}".format( + table_id = "python_table_sample_{}_{}".format( now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) @@ -86,7 +86,7 @@ def table_with_data_id(client): @pytest.fixture def routine_id(client, dataset_id): now = datetime.datetime.now() - routine_id = "python_samples_{}_{}".format( + routine_id = "python_routine_sample_{}_{}".format( now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] ) diff --git a/bigquery/samples/tests/test_browse_table_data.py b/bigquery/samples/tests/test_browse_table_data.py index f777bf91ca00..0e9cc6055494 100644 --- a/bigquery/samples/tests/test_browse_table_data.py +++ b/bigquery/samples/tests/test_browse_table_data.py @@ -24,3 +24,5 @@ def test_browse_table_data(capsys, client, table_with_data_id): assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out assert "Selected 2 columns from table {}".format(table_with_data_id) in out assert "Downloaded 10 rows from table {}".format(table_with_data_id) in out + assert "word" in out + 
assert "LVII" in out diff --git a/bigquery/samples/tests/test_client_list_jobs.py b/bigquery/samples/tests/test_client_list_jobs.py new file mode 100644 index 000000000000..011e081fdee4 --- /dev/null +++ b/bigquery/samples/tests/test_client_list_jobs.py @@ -0,0 +1,31 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import client_list_jobs +from .. import create_job + + +def test_client_list_jobs(capsys, client): + + job = create_job.create_job(client) + client.cancel_job(job.job_id) + job.cancel() + client_list_jobs.client_list_jobs(client) + out, err = capsys.readouterr() + assert "Started job: {}".format(job.job_id) in out + assert "Last 10 jobs:" in out + assert "Jobs from the last ten minutes:" in out + assert "Last 10 jobs run by all users:" in out + assert "Last 10 jobs done:" in out diff --git a/bigquery/samples/tests/test_client_query.py b/bigquery/samples/tests/test_client_query.py new file mode 100644 index 000000000000..fd5b8e7edd97 --- /dev/null +++ b/bigquery/samples/tests/test_client_query.py @@ -0,0 +1,24 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import client_query + + +def test_client_query(capsys, client): + + client_query.client_query(client) + out, err = capsys.readouterr() + assert "The query data:" in out + assert "name=James, count=272793" in out diff --git a/bigquery/samples/tests/test_copy_table.py b/bigquery/samples/tests/test_copy_table.py new file mode 100644 index 000000000000..6d7de2d9132c --- /dev/null +++ b/bigquery/samples/tests/test_copy_table.py @@ -0,0 +1,27 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import copy_table + + +def test_copy_table(capsys, client, table_with_data_id, random_table_id): + + copy_table.copy_table(client, table_with_data_id, random_table_id) + out, err = capsys.readouterr() + assert "A copy of the table created." 
in out + assert ( + client.get_table(random_table_id).num_rows + == client.get_table(table_with_data_id).num_rows + ) diff --git a/bigquery/samples/tests/test_create_dataset.py b/bigquery/samples/tests/test_create_dataset.py index dfadc67d8468..e52e9ddfdced 100644 --- a/bigquery/samples/tests/test_create_dataset.py +++ b/bigquery/samples/tests/test_create_dataset.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import create_dataset diff --git a/bigquery/samples/tests/test_create_job.py b/bigquery/samples/tests/test_create_job.py index fce005ae8236..5ead51156606 100644 --- a/bigquery/samples/tests/test_create_job.py +++ b/bigquery/samples/tests/test_create_job.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import create_job diff --git a/bigquery/samples/tests/test_create_routine.py b/bigquery/samples/tests/test_create_routine.py new file mode 100644 index 000000000000..7220d63542e2 --- /dev/null +++ b/bigquery/samples/tests/test_create_routine.py @@ -0,0 +1,23 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. 
import create_routine + + +def test_create_routine(capsys, client, random_routine_id): + + create_routine.create_routine(client, random_routine_id) + out, err = capsys.readouterr() + assert "Created routine {}".format(random_routine_id) in out diff --git a/bigquery/samples/tests/test_routine_samples.py b/bigquery/samples/tests/test_create_routine_ddl.py similarity index 69% rename from bigquery/samples/tests/test_routine_samples.py rename to bigquery/samples/tests/test_create_routine_ddl.py index 5a1c69c7f60f..bcb3249d26ef 100644 --- a/bigquery/samples/tests/test_routine_samples.py +++ b/bigquery/samples/tests/test_create_routine_ddl.py @@ -12,27 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. + from google.cloud import bigquery from google.cloud import bigquery_v2 - -def test_create_routine(capsys, client, random_routine_id): - from .. import create_routine - - create_routine.create_routine(client, random_routine_id) - out, err = capsys.readouterr() - assert "Created routine {}".format(random_routine_id) in out +from .. import create_routine_ddl def test_create_routine_ddl(capsys, client, random_routine_id): - from .. import create_routine_ddl create_routine_ddl.create_routine_ddl(client, random_routine_id) routine = client.get_routine(random_routine_id) out, err = capsys.readouterr() - assert "Created routine {}".format(random_routine_id) in out - return routine assert routine.type_ == "SCALAR_FUNCTION" assert routine.language == "SQL" expected_arguments = [ @@ -63,27 +55,3 @@ def test_create_routine_ddl(capsys, client, random_routine_id): ) ] assert routine.arguments == expected_arguments - - -def test_list_routines(capsys, client, dataset_id, routine_id): - from .. 
import list_routines - - list_routines.list_routines(client, dataset_id) - out, err = capsys.readouterr() - assert "Routines contained in dataset {}:".format(dataset_id) in out - assert routine_id in out - - -def test_delete_routine(capsys, client, routine_id): - from .. import delete_routine - - delete_routine.delete_routine(client, routine_id) - out, err = capsys.readouterr() - assert "Deleted routine {}.".format(routine_id) in out - - -def test_update_routine(client, routine_id): - from .. import update_routine - - routine = update_routine.update_routine(client, routine_id) - assert routine.body == "x * 4" diff --git a/bigquery/samples/tests/test_create_table.py b/bigquery/samples/tests/test_create_table.py index 093ee6e94277..f9ebc0e5d70d 100644 --- a/bigquery/samples/tests/test_create_table.py +++ b/bigquery/samples/tests/test_create_table.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import create_table diff --git a/bigquery/samples/tests/test_dataset_label_samples.py b/bigquery/samples/tests/test_dataset_label_samples.py index 94a2092407b0..1e526f2339ac 100644 --- a/bigquery/samples/tests/test_dataset_label_samples.py +++ b/bigquery/samples/tests/test_dataset_label_samples.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import delete_dataset_labels from .. import get_dataset_labels from .. import label_dataset diff --git a/bigquery/samples/tests/test_delete_dataset.py b/bigquery/samples/tests/test_delete_dataset.py index 2b1b6ad06195..836b3aebb272 100644 --- a/bigquery/samples/tests/test_delete_dataset.py +++ b/bigquery/samples/tests/test_delete_dataset.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. 
import delete_dataset diff --git a/bigquery/samples/tests/test_delete_routine.py b/bigquery/samples/tests/test_delete_routine.py new file mode 100644 index 000000000000..9347d1e22dc2 --- /dev/null +++ b/bigquery/samples/tests/test_delete_routine.py @@ -0,0 +1,23 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import delete_routine + + +def test_delete_routine(capsys, client, routine_id): + + delete_routine.delete_routine(client, routine_id) + out, err = capsys.readouterr() + assert "Deleted routine {}.".format(routine_id) in out diff --git a/bigquery/samples/tests/test_delete_table.py b/bigquery/samples/tests/test_delete_table.py index 8f4796623a83..f76ad8624cc6 100644 --- a/bigquery/samples/tests/test_delete_table.py +++ b/bigquery/samples/tests/test_delete_table.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import delete_table diff --git a/bigquery/samples/tests/test_get_dataset.py b/bigquery/samples/tests/test_get_dataset.py index 374f8835211a..8682be7ee3e9 100644 --- a/bigquery/samples/tests/test_get_dataset.py +++ b/bigquery/samples/tests/test_get_dataset.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. 
import get_dataset @@ -19,4 +20,4 @@ def test_get_dataset(capsys, client, dataset_id): get_dataset.get_dataset(client, dataset_id) out, err = capsys.readouterr() - assert "{}".format(dataset_id) in out + assert dataset_id in out diff --git a/bigquery/samples/tests/test_get_routine.py b/bigquery/samples/tests/test_get_routine.py new file mode 100644 index 000000000000..fa5f3093116c --- /dev/null +++ b/bigquery/samples/tests/test_get_routine.py @@ -0,0 +1,27 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import get_routine + + +def test_get_routine(capsys, client, routine_id): + + get_routine.get_routine(client, routine_id) + out, err = capsys.readouterr() + assert "Routine '{}':".format(routine_id) in out + assert "Type: 'SCALAR_FUNCTION'" in out + assert "Language: 'SQL'" in out + assert "Name: 'x'" in out + assert "Type: 'type_kind: INT64\n'" in out diff --git a/bigquery/samples/tests/test_get_table.py b/bigquery/samples/tests/test_get_table.py index b811ccecad1f..8adaa6557954 100644 --- a/bigquery/samples/tests/test_get_table.py +++ b/bigquery/samples/tests/test_get_table.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. + from google.cloud import bigquery + from .. 
import get_table @@ -30,7 +32,7 @@ def test_get_table(capsys, client, random_table_id): get_table.get_table(client, random_table_id) out, err = capsys.readouterr() assert "Got table '{}'.".format(random_table_id) in out - assert "full_name" in out # test that schema is printed + assert "full_name" in out assert "Table description: Sample Table" in out assert "Table has 0 rows" in out client.delete_table(table, not_found_ok=True) diff --git a/bigquery/samples/tests/test_list_datasets.py b/bigquery/samples/tests/test_list_datasets.py index 4c66a24f9b1a..d8c32e91ee20 100644 --- a/bigquery/samples/tests/test_list_datasets.py +++ b/bigquery/samples/tests/test_list_datasets.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import list_datasets diff --git a/bigquery/samples/tests/test_list_datasets_by_label.py b/bigquery/samples/tests/test_list_datasets_by_label.py index 346cbf1a982d..f414539b00b3 100644 --- a/bigquery/samples/tests/test_list_datasets_by_label.py +++ b/bigquery/samples/tests/test_list_datasets_by_label.py @@ -23,4 +23,4 @@ def test_list_datasets_by_label(capsys, client, dataset_id): dataset = client.update_dataset(dataset, ["labels"]) list_datasets_by_label.list_datasets_by_label(client) out, err = capsys.readouterr() - assert "{}".format(dataset_id) in out + assert dataset_id in out diff --git a/bigquery/samples/tests/test_list_routines.py b/bigquery/samples/tests/test_list_routines.py new file mode 100644 index 000000000000..e249238e1976 --- /dev/null +++ b/bigquery/samples/tests/test_list_routines.py @@ -0,0 +1,24 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import list_routines + + +def test_list_routines(capsys, client, dataset_id, routine_id): + + list_routines.list_routines(client, dataset_id) + out, err = capsys.readouterr() + assert "Routines contained in dataset {}:".format(dataset_id) in out + assert routine_id in out diff --git a/bigquery/samples/tests/test_list_tables.py b/bigquery/samples/tests/test_list_tables.py index ec1621ac7579..61ac04ea26ce 100644 --- a/bigquery/samples/tests/test_list_tables.py +++ b/bigquery/samples/tests/test_list_tables.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import list_tables diff --git a/bigquery/samples/tests/test_load_table_dataframe.py b/bigquery/samples/tests/test_load_table_dataframe.py index d553d449a525..2151704d3b25 100644 --- a/bigquery/samples/tests/test_load_table_dataframe.py +++ b/bigquery/samples/tests/test_load_table_dataframe.py @@ -12,16 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. + import pytest from .. 
import load_table_dataframe -pytest.importorskip("pandas") -pytest.importorskip("pyarrow") +pandas = pytest.importorskip("pandas") +pyarrow = pytest.importorskip("pyarrow") def test_load_table_dataframe(capsys, client, random_table_id): + table = load_table_dataframe.load_table_dataframe(client, random_table_id) out, _ = capsys.readouterr() assert "Loaded 4 rows and 3 columns" in out diff --git a/bigquery/samples/tests/test_model_samples.py b/bigquery/samples/tests/test_model_samples.py index d7b06a92a3e1..99d838533917 100644 --- a/bigquery/samples/tests/test_model_samples.py +++ b/bigquery/samples/tests/test_model_samples.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import delete_model from .. import get_model from .. import list_models diff --git a/bigquery/samples/tests/test_query_external_sheets_permanent_table.py b/bigquery/samples/tests/test_query_external_sheets_permanent_table.py new file mode 100644 index 000000000000..a7b5db09e5af --- /dev/null +++ b/bigquery/samples/tests/test_query_external_sheets_permanent_table.py @@ -0,0 +1,25 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. 
import query_external_sheets_permanent_table + + +def test_query_external_sheets_permanent_table(capsys, dataset_id): + + query_external_sheets_permanent_table.query_external_sheets_permanent_table( + dataset_id + ) + out, err = capsys.readouterr() + assert "There are 2 states with names starting with W in the selected range." in out diff --git a/bigquery/samples/tests/test_query_external_sheets_temporary_table.py b/bigquery/samples/tests/test_query_external_sheets_temporary_table.py new file mode 100644 index 000000000000..4856b6a49d2b --- /dev/null +++ b/bigquery/samples/tests/test_query_external_sheets_temporary_table.py @@ -0,0 +1,23 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from .. import query_external_sheets_temporary_table + + +def test_query_external_sheets_temporary_table(capsys): + + query_external_sheets_temporary_table.query_external_sheets_temporary_table() + out, err = capsys.readouterr() + assert "There are 2 states with names starting with W in the selected range." in out diff --git a/bigquery/samples/tests/test_query_script.py b/bigquery/samples/tests/test_query_script.py new file mode 100644 index 000000000000..70bb9df76fd4 --- /dev/null +++ b/bigquery/samples/tests/test_query_script.py @@ -0,0 +1,28 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. import query_script + + +def test_query_script(capsys, client): + + query_script.query_script(client) + out, _ = capsys.readouterr() + assert "Script created 2 child jobs." in out + assert ( + "53 of the top 100 names from year 2000 also appear in Shakespeare's works." + in out + ) + assert "produced 53 row(s)" in out + assert "produced 1 row(s)" in out diff --git a/bigquery/samples/tests/test_query_to_arrow.py b/bigquery/samples/tests/test_query_to_arrow.py index f70bd49fe565..2fbed807ece4 100644 --- a/bigquery/samples/tests/test_query_to_arrow.py +++ b/bigquery/samples/tests/test_query_to_arrow.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + import pyarrow from .. import query_to_arrow @@ -22,7 +23,6 @@ def test_query_to_arrow(capsys, client): arrow_table = query_to_arrow.query_to_arrow(client) out, err = capsys.readouterr() assert "Downloaded 8 rows, 2 columns." in out - arrow_schema = arrow_table.schema assert arrow_schema.names == ["race", "participant"] assert pyarrow.types.is_string(arrow_schema.types[0]) diff --git a/bigquery/samples/tests/test_table_exists.py b/bigquery/samples/tests/test_table_exists.py new file mode 100644 index 000000000000..232d77fbcb60 --- /dev/null +++ b/bigquery/samples/tests/test_table_exists.py @@ -0,0 +1,30 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud import bigquery + +from .. import table_exists + + +def test_table_exists(capsys, client, random_table_id): + + table_exists.table_exists(client, random_table_id) + out, err = capsys.readouterr() + assert "Table {} is not found.".format(random_table_id) in out + table = bigquery.Table(random_table_id) + table = client.create_table(table) + table_exists.table_exists(client, random_table_id) + out, err = capsys.readouterr() + assert "Table {} already exists.".format(random_table_id) in out diff --git a/bigquery/samples/tests/test_table_insert_rows.py b/bigquery/samples/tests/test_table_insert_rows.py new file mode 100644 index 000000000000..95d119dbdc93 --- /dev/null +++ b/bigquery/samples/tests/test_table_insert_rows.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud import bigquery + +from .. 
import table_insert_rows + + +def test_table_insert_rows(capsys, client, random_table_id): + + schema = [ + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + + table = bigquery.Table(random_table_id, schema=schema) + table = client.create_table(table) + + table_insert_rows.table_insert_rows(client, random_table_id) + out, err = capsys.readouterr() + assert "New rows have been added." in out diff --git a/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py new file mode 100644 index 000000000000..6a59609baacf --- /dev/null +++ b/bigquery/samples/tests/test_table_insert_rows_explicit_none_insert_ids.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud import bigquery + +from .. import table_insert_rows_explicit_none_insert_ids as mut + + +def test_table_insert_rows_explicit_none_insert_ids(capsys, client, random_table_id): + + schema = [ + bigquery.SchemaField("full_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + + table = bigquery.Table(random_table_id, schema=schema) + table = client.create_table(table) + + mut.table_insert_rows_explicit_none_insert_ids(client, random_table_id) + out, err = capsys.readouterr() + assert "New rows have been added." 
in out diff --git a/bigquery/samples/tests/test_update_dataset_access.py b/bigquery/samples/tests/test_update_dataset_access.py index ae33dbfe4a4c..679b700731e3 100644 --- a/bigquery/samples/tests/test_update_dataset_access.py +++ b/bigquery/samples/tests/test_update_dataset_access.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import update_dataset_access diff --git a/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py b/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py new file mode 100644 index 000000000000..55fa4b0d96fb --- /dev/null +++ b/bigquery/samples/tests/test_update_dataset_default_partition_expiration.py @@ -0,0 +1,31 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .. 
import update_dataset_default_partition_expiration + + +def test_update_dataset_default_partition_expiration(capsys, client, dataset_id): + + ninety_days_ms = 90 * 24 * 60 * 60 * 1000 # in milliseconds + + update_dataset_default_partition_expiration.update_dataset_default_partition_expiration( + client, dataset_id + ) + out, _ = capsys.readouterr() + assert ( + "Updated dataset {} with new default partition expiration {}".format( + dataset_id, ninety_days_ms + ) + in out + ) diff --git a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py index 46e9654209ed..a97de11a2f1a 100644 --- a/bigquery/samples/tests/test_update_dataset_default_table_expiration.py +++ b/bigquery/samples/tests/test_update_dataset_default_table_expiration.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. import update_dataset_default_table_expiration diff --git a/bigquery/samples/tests/test_update_dataset_description.py b/bigquery/samples/tests/test_update_dataset_description.py index c6f8889f50da..63826077b976 100644 --- a/bigquery/samples/tests/test_update_dataset_description.py +++ b/bigquery/samples/tests/test_update_dataset_description.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + from .. 
import update_dataset_description diff --git a/translate/tests/unit/__init__.py b/bigquery/samples/tests/test_update_routine.py similarity index 65% rename from translate/tests/unit/__init__.py rename to bigquery/samples/tests/test_update_routine.py index df379f1e9d88..8adfab32e032 100644 --- a/translate/tests/unit/__init__.py +++ b/bigquery/samples/tests/test_update_routine.py @@ -1,13 +1,22 @@ -# Copyright 2016 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + + +from .. import update_routine + + +def test_update_routine(client, routine_id): + + routine = update_routine.update_routine(client, routine_id) + assert routine.body == "x * 4" diff --git a/bigquery/samples/tests/test_update_table_require_partition_filter.py b/bigquery/samples/tests/test_update_table_require_partition_filter.py new file mode 100644 index 000000000000..1cbd2b2279b2 --- /dev/null +++ b/bigquery/samples/tests/test_update_table_require_partition_filter.py @@ -0,0 +1,33 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud import bigquery +from .. import update_table_require_partition_filter + + +def test_update_table_require_partition_filter(capsys, client, random_table_id): + # Make a partitioned table. + schema = [bigquery.SchemaField("transaction_timestamp", "TIMESTAMP")] + table = bigquery.Table(random_table_id, schema) + table.time_partitioning = bigquery.TimePartitioning(field="transaction_timestamp") + table = client.create_table(table) + + update_table_require_partition_filter.update_table_require_partition_filter( + client, random_table_id + ) + out, _ = capsys.readouterr() + assert ( + "Updated table '{}' with require_partition_filter=True".format(random_table_id) + in out + ) diff --git a/bigquery/samples/update_dataset_access.py b/bigquery/samples/update_dataset_access.py index aa316a38dff9..134cf1b940cf 100644 --- a/bigquery/samples/update_dataset_access.py +++ b/bigquery/samples/update_dataset_access.py @@ -24,7 +24,7 @@ def update_dataset_access(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. entry = bigquery.AccessEntry( role="READER", @@ -36,7 +36,7 @@ def update_dataset_access(client, dataset_id): entries.append(entry) dataset.access_entries = entries - dataset = client.update_dataset(dataset, ["access_entries"]) # API request + dataset = client.update_dataset(dataset, ["access_entries"]) # Make an API request. 
full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) print( diff --git a/bigquery/samples/update_dataset_default_partition_expiration.py b/bigquery/samples/update_dataset_default_partition_expiration.py new file mode 100644 index 000000000000..502d52ff199b --- /dev/null +++ b/bigquery/samples/update_dataset_default_partition_expiration.py @@ -0,0 +1,43 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def update_dataset_default_partition_expiration(client, dataset_id): + + # [START bigquery_update_dataset_partition_expiration] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set dataset_id to the ID of the dataset to fetch. + # dataset_id = 'your-project.your_dataset' + + dataset = client.get_dataset(dataset_id) # Make an API request. + + # Set the default partition expiration (applies to new tables, only) in + # milliseconds. This example sets the default expiration to 90 days. + dataset.default_partition_expiration_ms = 90 * 24 * 60 * 60 * 1000 + + dataset = client.update_dataset( + dataset, ["default_partition_expiration_ms"] + ) # Make an API request. 
+ + print( + "Updated dataset {}.{} with new default partition expiration {}".format( + dataset.project, dataset.dataset_id, dataset.default_partition_expiration_ms + ) + ) + # [END bigquery_update_dataset_partition_expiration] diff --git a/bigquery/samples/update_dataset_default_table_expiration.py b/bigquery/samples/update_dataset_default_table_expiration.py index 4534bb2011eb..8de354b1f21b 100644 --- a/bigquery/samples/update_dataset_default_table_expiration.py +++ b/bigquery/samples/update_dataset_default_table_expiration.py @@ -25,12 +25,12 @@ def update_dataset_default_table_expiration(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) - dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000 # in milliseconds + dataset = client.get_dataset(dataset_id) # Make an API request. + dataset.default_table_expiration_ms = 24 * 60 * 60 * 1000 # In milliseconds. dataset = client.update_dataset( dataset, ["default_table_expiration_ms"] - ) # API request + ) # Make an API request. full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) print( diff --git a/bigquery/samples/update_dataset_description.py b/bigquery/samples/update_dataset_description.py index f3afb7fa68ce..08eed8da2b64 100644 --- a/bigquery/samples/update_dataset_description.py +++ b/bigquery/samples/update_dataset_description.py @@ -25,9 +25,9 @@ def update_dataset_description(client, dataset_id): # TODO(developer): Set dataset_id to the ID of the dataset to fetch. # dataset_id = 'your-project.your_dataset' - dataset = client.get_dataset(dataset_id) + dataset = client.get_dataset(dataset_id) # Make an API request. dataset.description = "Updated description." - dataset = client.update_dataset(dataset, ["description"]) + dataset = client.update_dataset(dataset, ["description"]) # Make an API request. 
full_dataset_id = "{}.{}".format(dataset.project, dataset.dataset_id) print( diff --git a/bigquery/samples/update_model.py b/bigquery/samples/update_model.py index 5df4ada886ed..7583c410e1ef 100644 --- a/bigquery/samples/update_model.py +++ b/bigquery/samples/update_model.py @@ -26,9 +26,9 @@ def update_model(client, model_id): # TODO(developer): Set model_id to the ID of the model to fetch. # model_id = 'your-project.your_dataset.your_model' - model = client.get_model(model_id) + model = client.get_model(model_id) # Make an API request. model.description = "This model was modified from a Python program." - model = client.update_model(model, ["description"]) + model = client.update_model(model, ["description"]) # Make an API request. full_model_id = "{}.{}.{}".format(model.project, model.dataset_id, model.model_id) print( diff --git a/bigquery/samples/update_routine.py b/bigquery/samples/update_routine.py index 4d491d42e168..4489d68f7ee4 100644 --- a/bigquery/samples/update_routine.py +++ b/bigquery/samples/update_routine.py @@ -33,13 +33,14 @@ def update_routine(client, routine_id): routine, [ "body", - # Due to a limitation of the API, all fields are required, not just + # Due to a limitation of the API, + # all fields are required, not just # those that have been updated. "arguments", "language", "type_", "return_type", ], - ) + ) # Make an API request. # [END bigquery_update_routine] return routine diff --git a/bigquery/samples/update_table_require_partition_filter.py b/bigquery/samples/update_table_require_partition_filter.py new file mode 100644 index 000000000000..4c6be2d2cedc --- /dev/null +++ b/bigquery/samples/update_table_require_partition_filter.py @@ -0,0 +1,41 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def update_table_require_partition_filter(client, table_id): + + # [START bigquery_update_table_require_partition_filter] + # TODO(developer): Import the client library. + # from google.cloud import bigquery + + # TODO(developer): Construct a BigQuery client object. + # client = bigquery.Client() + + # TODO(developer): Set table_id to the ID of the model to fetch. + # table_id = 'your-project.your_dataset.your_table' + + table = client.get_table(table_id) # Make an API request. + table.require_partition_filter = True + table = client.update_table(table, ["require_partition_filter"]) + + # View table properties + print( + "Updated table '{}.{}.{}' with require_partition_filter={}.".format( + table.project, + table.dataset_id, + table.table_id, + table.require_partition_filter, + ) + ) + # [END bigquery_update_table_require_partition_filter] diff --git a/bigquery/setup.py b/bigquery/setup.py index 897a7eac7f36..e0f3edf19d45 100644 --- a/bigquery/setup.py +++ b/bigquery/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-bigquery" description = "Google BigQuery API client library" -version = "1.20.0" +version = "1.21.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/bigquery/synth.metadata b/bigquery/synth.metadata index b85a00155ee8..863d7b1ad9e6 100644 --- a/bigquery/synth.metadata +++ b/bigquery/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-12T14:51:08.578469Z", + "updateTime": "2019-10-29T12:13:17.119821Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.2", - 
"dockerImage": "googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "1cb29d0fd49437d8e5d7de327e258739e998f01c", - "internalRef": "268598527" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } } ], diff --git a/bigquery/tests/unit/model/test_model.py b/bigquery/tests/unit/model/test_model.py index b6d9756e15fe..bbb93ef9e897 100644 --- a/bigquery/tests/unit/model/test_model.py +++ b/bigquery/tests/unit/model/test_model.py @@ -21,6 +21,8 @@ import google.cloud._helpers from google.cloud.bigquery_v2.gapic import enums +KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" + @pytest.fixture def target_class(): @@ -99,6 +101,7 @@ def test_from_api_repr(target_class): }, ], "featureColumns": [], + "encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME}, } got = target_class.from_api_repr(resource) @@ -116,6 +119,7 @@ def test_from_api_repr(target_class): assert got.friendly_name == u"A friendly name." 
assert got.model_type == enums.Model.ModelType.LOGISTIC_REGRESSION assert got.labels == {"greeting": u"こんにちは"} + assert got.encryption_configuration.kms_key_name == KMS_KEY_NAME assert got.training_runs[0].training_options.initial_learn_rate == 1.0 assert ( got.training_runs[0] @@ -160,6 +164,7 @@ def test_from_api_repr_w_minimal_resource(target_class): assert got.friendly_name is None assert got.model_type == enums.Model.ModelType.MODEL_TYPE_UNSPECIFIED assert got.labels == {} + assert got.encryption_configuration is None assert len(got.training_runs) == 0 assert len(got.feature_columns) == 0 assert len(got.label_columns) == 0 @@ -229,6 +234,17 @@ def test_from_api_repr_w_unknown_fields(target_class): ["labels"], {"labels": {"a-label": "a-value"}}, ), + ( + { + "friendlyName": "hello", + "description": "world", + "expirationTime": None, + "labels": {"a-label": "a-value"}, + "encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME}, + }, + ["encryptionConfiguration"], + {"encryptionConfiguration": {"kmsKeyName": KMS_KEY_NAME}}, + ), ], ) def test_build_resource(object_under_test, resource, filter_fields, expected): @@ -283,6 +299,18 @@ def test_replace_labels(object_under_test): assert object_under_test.labels == {} +def test_set_encryption_configuration(object_under_test): + from google.cloud.bigquery.encryption_configuration import EncryptionConfiguration + + assert not object_under_test.encryption_configuration + object_under_test.encryption_configuration = EncryptionConfiguration( + kms_key_name=KMS_KEY_NAME + ) + assert object_under_test.encryption_configuration.kms_key_name == KMS_KEY_NAME + object_under_test.encryption_configuration = None + assert not object_under_test.encryption_configuration + + def test_repr(target_class): model = target_class("my-proj.my_dset.my_model") got = repr(model) diff --git a/bigquery/tests/unit/test__helpers.py b/bigquery/tests/unit/test__helpers.py index 3884695d83af..6d92b4de73ba 100644 --- 
a/bigquery/tests/unit/test__helpers.py +++ b/bigquery/tests/unit/test__helpers.py @@ -17,6 +17,8 @@ import decimal import unittest +import mock + class Test_not_null(unittest.TestCase): def _call_fut(self, value, field): @@ -412,7 +414,8 @@ class Test_row_tuple_from_json(unittest.TestCase): def _call_fut(self, row, schema): from google.cloud.bigquery._helpers import _row_tuple_from_json - return _row_tuple_from_json(row, schema) + with _field_isinstance_patcher(): + return _row_tuple_from_json(row, schema) def test_w_single_scalar_column(self): # SELECT 1 AS col @@ -529,7 +532,8 @@ class Test_rows_from_json(unittest.TestCase): def _call_fut(self, rows, schema): from google.cloud.bigquery._helpers import _rows_from_json - return _rows_from_json(rows, schema) + with _field_isinstance_patcher(): + return _rows_from_json(rows, schema) def test_w_record_subfield(self): from google.cloud.bigquery.table import Row @@ -1023,3 +1027,23 @@ def __init__(self, mode, name="unknown", field_type="UNKNOWN", fields=()): self.name = name self.field_type = field_type self.fields = fields + + +def _field_isinstance_patcher(): + """A patcher thank makes _Field instances seem like SchemaField instances. 
+ """ + from google.cloud.bigquery.schema import SchemaField + + def fake_isinstance(instance, target_class): + if instance.__class__.__name__ != "_Field": + return isinstance(instance, target_class) # pragma: NO COVER + + # pretend that _Field() instances are actually instances of SchemaField + return target_class is SchemaField or ( + isinstance(target_class, tuple) and SchemaField in target_class + ) + + patcher = mock.patch( + "google.cloud.bigquery.schema.isinstance", side_effect=fake_isinstance + ) + return patcher diff --git a/bigquery/tests/unit/test__pandas_helpers.py b/bigquery/tests/unit/test__pandas_helpers.py index b539abe9a89a..a6ccec2e094f 100644 --- a/bigquery/tests/unit/test__pandas_helpers.py +++ b/bigquery/tests/unit/test__pandas_helpers.py @@ -16,6 +16,7 @@ import datetime import decimal import functools +import operator import warnings import mock @@ -34,6 +35,7 @@ import pytest import pytz +from google import api_core from google.cloud.bigquery import schema @@ -618,7 +620,7 @@ def test_list_columns_and_indexes_without_named_index(module_under_test): @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_list_columns_and_indexes_with_named_index_same_as_column_name( - module_under_test + module_under_test, ): df_data = collections.OrderedDict( [ @@ -700,6 +702,32 @@ def test_list_columns_and_indexes_with_multiindex(module_under_test): assert columns_and_indexes == expected +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_dataframe_to_bq_schema_dict_sequence(module_under_test): + df_data = collections.OrderedDict( + [ + ("str_column", [u"hello", u"world"]), + ("int_column", [42, 8]), + ("bool_column", [True, False]), + ] + ) + dataframe = pandas.DataFrame(df_data) + + dict_schema = [ + {"name": "str_column", "type": "STRING", "mode": "NULLABLE"}, + {"name": "bool_column", "type": "BOOL", "mode": "REQUIRED"}, + ] + + returned_schema = module_under_test.dataframe_to_bq_schema(dataframe, dict_schema) + + 
expected_schema = ( + schema.SchemaField("str_column", "STRING", "NULLABLE"), + schema.SchemaField("int_column", "INTEGER", "NULLABLE"), + schema.SchemaField("bool_column", "BOOL", "REQUIRED"), + ) + assert returned_schema == expected_schema + + @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") @pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") def test_dataframe_to_arrow_with_multiindex(module_under_test): @@ -855,6 +883,28 @@ def test_dataframe_to_arrow_with_unknown_type(module_under_test): assert arrow_schema[3].name == "field03" +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_dataframe_to_arrow_dict_sequence_schema(module_under_test): + dict_schema = [ + {"name": "field01", "type": "STRING", "mode": "REQUIRED"}, + {"name": "field02", "type": "BOOL", "mode": "NULLABLE"}, + ] + + dataframe = pandas.DataFrame( + {"field01": [u"hello", u"world"], "field02": [True, False]} + ) + + arrow_table = module_under_test.dataframe_to_arrow(dataframe, dict_schema) + arrow_schema = arrow_table.schema + + expected_fields = [ + pyarrow.field("field01", "string", nullable=False), + pyarrow.field("field02", "bool", nullable=True), + ] + assert list(arrow_schema) == expected_fields + + @pytest.mark.skipif(pandas is None, reason="Requires `pandas`") def test_dataframe_to_parquet_without_pyarrow(module_under_test, monkeypatch): monkeypatch.setattr(module_under_test, "pyarrow", None) @@ -905,3 +955,342 @@ def test_dataframe_to_parquet_compression_method(module_under_test): call_args = fake_write_table.call_args assert call_args is not None assert call_args.kwargs.get("compression") == "ZSTD" + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +def test_dataframe_to_bq_schema_fallback_needed_wo_pyarrow(module_under_test): + dataframe = pandas.DataFrame( + data=[ + {"id": 10, "status": u"FOO", "execution_date": datetime.date(2019, 5, 10)}, + {"id": 
20, "status": u"BAR", "created_at": datetime.date(2018, 9, 12)}, + ] + ) + + no_pyarrow_patch = mock.patch(module_under_test.__name__ + ".pyarrow", None) + + with no_pyarrow_patch, warnings.catch_warnings(record=True) as warned: + detected_schema = module_under_test.dataframe_to_bq_schema( + dataframe, bq_schema=[] + ) + + assert detected_schema is None + + # a warning should also be issued + expected_warnings = [ + warning for warning in warned if "could not determine" in str(warning).lower() + ] + assert len(expected_warnings) == 1 + msg = str(expected_warnings[0]) + assert "execution_date" in msg and "created_at" in msg + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_dataframe_to_bq_schema_fallback_needed_w_pyarrow(module_under_test): + dataframe = pandas.DataFrame( + data=[ + {"id": 10, "status": u"FOO", "created_at": datetime.date(2019, 5, 10)}, + {"id": 20, "status": u"BAR", "created_at": datetime.date(2018, 9, 12)}, + ] + ) + + with warnings.catch_warnings(record=True) as warned: + detected_schema = module_under_test.dataframe_to_bq_schema( + dataframe, bq_schema=[] + ) + + expected_schema = ( + schema.SchemaField("id", "INTEGER", mode="NULLABLE"), + schema.SchemaField("status", "STRING", mode="NULLABLE"), + schema.SchemaField("created_at", "DATE", mode="NULLABLE"), + ) + by_name = operator.attrgetter("name") + assert sorted(detected_schema, key=by_name) == sorted(expected_schema, key=by_name) + + # there should be no relevant warnings + unwanted_warnings = [ + warning for warning in warned if "could not determine" in str(warning).lower() + ] + assert not unwanted_warnings + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_dataframe_to_bq_schema_pyarrow_fallback_fails(module_under_test): + dataframe = pandas.DataFrame( + data=[ + {"struct_field": {"one": 2}, "status": 
u"FOO"}, + {"struct_field": {"two": u"222"}, "status": u"BAR"}, + ] + ) + + with warnings.catch_warnings(record=True) as warned: + detected_schema = module_under_test.dataframe_to_bq_schema( + dataframe, bq_schema=[] + ) + + assert detected_schema is None + + # a warning should also be issued + expected_warnings = [ + warning for warning in warned if "could not determine" in str(warning).lower() + ] + assert len(expected_warnings) == 1 + assert "struct_field" in str(expected_warnings[0]) + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_augment_schema_type_detection_succeeds(module_under_test): + dataframe = pandas.DataFrame( + data=[ + { + "bool_field": False, + "int_field": 123, + "float_field": 3.141592, + "time_field": datetime.time(17, 59, 47), + "timestamp_field": datetime.datetime(2005, 5, 31, 14, 25, 55), + "date_field": datetime.date(2005, 5, 31), + "bytes_field": b"some bytes", + "string_field": u"some characters", + "numeric_field": decimal.Decimal("123.456"), + } + ] + ) + + # NOTE: In Pandas dataframe, the dtype of Python's datetime instances is + # set to "datetime64[ns]", and pyarrow converts that to pyarrow.TimestampArray. + # We thus cannot expect to get a DATETIME date when converting back to the + # BigQuery type. 
+ + current_schema = ( + schema.SchemaField("bool_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("int_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("float_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("time_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("timestamp_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("date_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("bytes_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("string_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("numeric_field", field_type=None, mode="NULLABLE"), + ) + + with warnings.catch_warnings(record=True) as warned: + augmented_schema = module_under_test.augment_schema(dataframe, current_schema) + + # there should be no relevant warnings + unwanted_warnings = [ + warning for warning in warned if "Pyarrow could not" in str(warning) + ] + assert not unwanted_warnings + + # the augmented schema must match the expected + expected_schema = ( + schema.SchemaField("bool_field", field_type="BOOL", mode="NULLABLE"), + schema.SchemaField("int_field", field_type="INT64", mode="NULLABLE"), + schema.SchemaField("float_field", field_type="FLOAT64", mode="NULLABLE"), + schema.SchemaField("time_field", field_type="TIME", mode="NULLABLE"), + schema.SchemaField("timestamp_field", field_type="TIMESTAMP", mode="NULLABLE"), + schema.SchemaField("date_field", field_type="DATE", mode="NULLABLE"), + schema.SchemaField("bytes_field", field_type="BYTES", mode="NULLABLE"), + schema.SchemaField("string_field", field_type="STRING", mode="NULLABLE"), + schema.SchemaField("numeric_field", field_type="NUMERIC", mode="NULLABLE"), + ) + by_name = operator.attrgetter("name") + assert sorted(augmented_schema, key=by_name) == sorted(expected_schema, key=by_name) + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def 
test_augment_schema_type_detection_fails(module_under_test): + dataframe = pandas.DataFrame( + data=[ + { + "status": u"FOO", + "struct_field": {"one": 1}, + "struct_field_2": {"foo": u"123"}, + }, + { + "status": u"BAR", + "struct_field": {"two": u"111"}, + "struct_field_2": {"bar": 27}, + }, + ] + ) + current_schema = [ + schema.SchemaField("status", field_type="STRING", mode="NULLABLE"), + schema.SchemaField("struct_field", field_type=None, mode="NULLABLE"), + schema.SchemaField("struct_field_2", field_type=None, mode="NULLABLE"), + ] + + with warnings.catch_warnings(record=True) as warned: + augmented_schema = module_under_test.augment_schema(dataframe, current_schema) + + assert augmented_schema is None + + expected_warnings = [ + warning for warning in warned if "could not determine" in str(warning) + ] + assert len(expected_warnings) == 1 + warning_msg = str(expected_warnings[0]) + assert "pyarrow" in warning_msg.lower() + assert "struct_field" in warning_msg and "struct_field_2" in warning_msg + + +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_dataframe_to_parquet_dict_sequence_schema(module_under_test): + dict_schema = [ + {"name": "field01", "type": "STRING", "mode": "REQUIRED"}, + {"name": "field02", "type": "BOOL", "mode": "NULLABLE"}, + ] + + dataframe = pandas.DataFrame( + {"field01": [u"hello", u"world"], "field02": [True, False]} + ) + + write_table_patch = mock.patch.object( + module_under_test.pyarrow.parquet, "write_table", autospec=True + ) + to_arrow_patch = mock.patch.object( + module_under_test, "dataframe_to_arrow", autospec=True + ) + + with write_table_patch, to_arrow_patch as fake_to_arrow: + module_under_test.dataframe_to_parquet(dataframe, dict_schema, None) + + expected_schema_arg = [ + schema.SchemaField("field01", "STRING", mode="REQUIRED"), + schema.SchemaField("field02", "BOOL", mode="NULLABLE"), + ] + schema_arg = fake_to_arrow.call_args.args[1] + assert schema_arg == expected_schema_arg + + 
+@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_download_arrow_tabledata_list_unknown_field_type(module_under_test): + fake_page = api_core.page_iterator.Page( + parent=mock.Mock(), + items=[{"page_data": "foo"}], + item_to_value=api_core.page_iterator._item_to_value_identity, + ) + fake_page._columns = [[1, 10, 100], [2.2, 22.22, 222.222]] + pages = [fake_page] + + bq_schema = [ + schema.SchemaField("population_size", "INTEGER"), + schema.SchemaField("alien_field", "ALIEN_FLOAT_TYPE"), + ] + + results_gen = module_under_test.download_arrow_tabledata_list(pages, bq_schema) + + with warnings.catch_warnings(record=True) as warned: + result = next(results_gen) + + unwanted_warnings = [ + warning + for warning in warned + if "please pass schema= explicitly" in str(warning).lower() + ] + assert not unwanted_warnings + + assert len(result.columns) == 2 + col = result.columns[0] + assert type(col) is pyarrow.lib.Int64Array + assert list(col) == [1, 10, 100] + col = result.columns[1] + assert type(col) is pyarrow.lib.DoubleArray + assert list(col) == [2.2, 22.22, 222.222] + + +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_download_arrow_tabledata_list_known_field_type(module_under_test): + fake_page = api_core.page_iterator.Page( + parent=mock.Mock(), + items=[{"page_data": "foo"}], + item_to_value=api_core.page_iterator._item_to_value_identity, + ) + fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]] + pages = [fake_page] + + bq_schema = [ + schema.SchemaField("population_size", "INTEGER"), + schema.SchemaField("non_alien_field", "STRING"), + ] + + results_gen = module_under_test.download_arrow_tabledata_list(pages, bq_schema) + with warnings.catch_warnings(record=True) as warned: + result = next(results_gen) + + unwanted_warnings = [ + warning + for warning in warned + if "please pass schema= explicitly" in str(warning).lower() + ] + assert not unwanted_warnings + + assert len(result.columns) == 2 + 
col = result.columns[0] + assert type(col) is pyarrow.lib.Int64Array + assert list(col) == [1, 10, 100] + col = result.columns[1] + assert type(col) is pyarrow.lib.StringArray + assert list(col) == ["2.2", "22.22", "222.222"] + + +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_download_arrow_tabledata_list_dict_sequence_schema(module_under_test): + fake_page = api_core.page_iterator.Page( + parent=mock.Mock(), + items=[{"page_data": "foo"}], + item_to_value=api_core.page_iterator._item_to_value_identity, + ) + fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]] + pages = [fake_page] + + dict_schema = [ + {"name": "population_size", "type": "INTEGER", "mode": "NULLABLE"}, + {"name": "non_alien_field", "type": "STRING", "mode": "NULLABLE"}, + ] + + results_gen = module_under_test.download_arrow_tabledata_list(pages, dict_schema) + result = next(results_gen) + + assert len(result.columns) == 2 + col = result.columns[0] + assert type(col) is pyarrow.lib.Int64Array + assert list(col) == [1, 10, 100] + col = result.columns[1] + assert type(col) is pyarrow.lib.StringArray + assert list(col) == ["2.2", "22.22", "222.222"] + + +@pytest.mark.skipif(pandas is None, reason="Requires `pandas`") +@pytest.mark.skipif(pyarrow is None, reason="Requires `pyarrow`") +def test_download_dataframe_tabledata_list_dict_sequence_schema(module_under_test): + fake_page = api_core.page_iterator.Page( + parent=mock.Mock(), + items=[{"page_data": "foo"}], + item_to_value=api_core.page_iterator._item_to_value_identity, + ) + fake_page._columns = [[1, 10, 100], ["2.2", "22.22", "222.222"]] + pages = [fake_page] + + dict_schema = [ + {"name": "population_size", "type": "INTEGER", "mode": "NULLABLE"}, + {"name": "non_alien_field", "type": "STRING", "mode": "NULLABLE"}, + ] + + results_gen = module_under_test.download_dataframe_tabledata_list( + pages, dict_schema, dtypes={} + ) + result = next(results_gen) + + expected_result = pandas.DataFrame( + 
collections.OrderedDict( + [ + ("population_size", [1, 10, 100]), + ("non_alien_field", ["2.2", "22.22", "222.222"]), + ] + ) + ) + assert result.equals(expected_result) diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py index ea4b114358a9..e6ed4d1c8072 100644 --- a/bigquery/tests/unit/test_client.py +++ b/bigquery/tests/unit/test_client.py @@ -81,7 +81,7 @@ class TestClient(unittest.TestCase): TABLE_ID = "TABLE_ID" MODEL_ID = "MODEL_ID" TABLE_REF = DatasetReference(PROJECT, DS_ID).table(TABLE_ID) - KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" LOCATION = "us-central" @staticmethod @@ -1074,7 +1074,9 @@ def test_create_table_w_custom_property(self): self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_encryption_configuration(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) from google.cloud.bigquery.table import Table path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) @@ -1136,7 +1138,8 @@ def test_create_table_w_day_partition_and_expire(self): self.assertEqual(got.table_id, self.TABLE_ID) def test_create_table_w_schema_and_query(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table path = "projects/%s/datasets/%s/tables" % (self.PROJECT, self.DS_ID) query = "SELECT * from %s:%s" % (self.DS_ID, self.TABLE_ID) @@ -1751,7 +1754,8 @@ def test_update_routine(self): self.assertEqual(req[1]["headers"]["If-Match"], "im-an-etag") def test_update_table(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table path = "projects/%s/datasets/%s/tables/%s" % ( self.PROJECT, @@ 
-1894,7 +1898,8 @@ def test_update_table_w_query(self): import datetime from google.cloud._helpers import UTC from google.cloud._helpers import _millis - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table path = "projects/%s/datasets/%s/tables/%s" % ( self.PROJECT, @@ -2952,8 +2957,26 @@ def test_list_jobs_w_time_filter(self): }, ) + def test_list_jobs_w_parent_job_filter(self): + from google.cloud.bigquery import job + + creds = _make_credentials() + client = self._make_one(self.PROJECT, creds) + conn = client._connection = make_connection({}, {}) + + parent_job_args = ["parent-job-123", job._AsyncJob("parent-job-123", client)] + + for parent_job in parent_job_args: + list(client.list_jobs(parent_job=parent_job)) + conn.api_request.assert_called_once_with( + method="GET", + path="/projects/%s/jobs" % self.PROJECT, + query_params={"projection": "full", "parentJobId": "parent-job-123"}, + ) + conn.api_request.reset_mock() + def test_load_table_from_uri(self): - from google.cloud.bigquery.job import LoadJob + from google.cloud.bigquery.job import LoadJob, LoadJobConfig JOB = "job_name" DESTINATION = "destination_table" @@ -2973,11 +2996,14 @@ def test_load_table_from_uri(self): } creds = _make_credentials() http = object() + job_config = LoadJobConfig() client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) conn = client._connection = make_connection(RESOURCE) destination = client.dataset(self.DS_ID).table(DESTINATION) - job = client.load_table_from_uri(SOURCE_URI, destination, job_id=JOB) + job = client.load_table_from_uri( + SOURCE_URI, destination, job_id=JOB, job_config=job_config + ) # Check that load_table_from_uri actually starts the job. 
conn.api_request.assert_called_once_with( @@ -2985,6 +3011,7 @@ def test_load_table_from_uri(self): ) self.assertIsInstance(job, LoadJob) + self.assertIsInstance(job._configuration, LoadJobConfig) self.assertIs(job._client, client) self.assertEqual(job.job_id, JOB) self.assertEqual(list(job.source_uris), [SOURCE_URI]) @@ -3080,6 +3107,26 @@ def test_load_table_from_uri_w_client_location(self): method="POST", path="/projects/other-project/jobs", data=resource ) + def test_load_table_from_uri_w_invalid_job_config(self): + from google.cloud.bigquery import job + + JOB = "job_name" + DESTINATION = "destination_table" + SOURCE_URI = "http://example.com/source.csv" + + creds = _make_credentials() + http = object() + job_config = job.CopyJobConfig() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + destination = client.dataset(self.DS_ID).table(DESTINATION) + + with self.assertRaises(TypeError) as exc: + client.load_table_from_uri( + SOURCE_URI, destination, job_id=JOB, job_config=job_config + ) + + self.assertIn("Expected an instance of LoadJobConfig", exc.exception.args[0]) + @staticmethod def _mock_requests_response(status_code, headers, content=b""): return mock.Mock( @@ -3402,6 +3449,66 @@ def test_copy_table_w_source_strings(self): ).table("destination_table") self.assertEqual(job.destination, expected_destination) + def test_copy_table_w_invalid_job_config(self): + from google.cloud.bigquery import job + + JOB = "job_name" + SOURCE = "source_table" + DESTINATION = "destination_table" + + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + job_config = job.ExtractJobConfig() + dataset = client.dataset(self.DS_ID) + source = dataset.table(SOURCE) + destination = dataset.table(DESTINATION) + with self.assertRaises(TypeError) as exc: + client.copy_table(source, destination, job_id=JOB, job_config=job_config) + + self.assertIn("Expected an instance of 
CopyJobConfig", exc.exception.args[0]) + + def test_copy_table_w_valid_job_config(self): + from google.cloud.bigquery.job import CopyJobConfig + + JOB = "job_name" + SOURCE = "source_table" + DESTINATION = "destination_table" + RESOURCE = { + "jobReference": {"projectId": self.PROJECT, "jobId": JOB}, + "configuration": { + "copy": { + "sourceTables": [ + { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": SOURCE, + } + ], + "destinationTable": { + "projectId": self.PROJECT, + "datasetId": self.DS_ID, + "tableId": DESTINATION, + }, + } + }, + } + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + job_config = CopyJobConfig() + conn = client._connection = make_connection(RESOURCE) + dataset = client.dataset(self.DS_ID) + source = dataset.table(SOURCE) + destination = dataset.table(DESTINATION) + + job = client.copy_table(source, destination, job_id=JOB, job_config=job_config) + # Check that copy_table actually starts the job. 
+ conn.api_request.assert_called_once_with( + method="POST", path="/projects/%s/jobs" % self.PROJECT, data=RESOURCE + ) + self.assertIsInstance(job._configuration, CopyJobConfig) + def test_extract_table(self): from google.cloud.bigquery.job import ExtractJob @@ -3442,6 +3549,24 @@ def test_extract_table(self): self.assertEqual(job.source, source) self.assertEqual(list(job.destination_uris), [DESTINATION]) + def test_extract_table_w_invalid_job_config(self): + from google.cloud.bigquery import job + + JOB = "job_id" + SOURCE = "source_table" + DESTINATION = "gs://bucket_name/object_name" + + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + dataset = client.dataset(self.DS_ID) + source = dataset.table(SOURCE) + job_config = job.LoadJobConfig() + with self.assertRaises(TypeError) as exc: + client.extract_table(source, DESTINATION, job_id=JOB, job_config=job_config) + + self.assertIn("Expected an instance of ExtractJobConfig", exc.exception.args[0]) + def test_extract_table_w_explicit_project(self): job_id = "job_id" source_id = "source_table" @@ -3725,6 +3850,35 @@ def test_query_w_explicit_job_config(self): method="POST", path="/projects/PROJECT/jobs", data=resource ) + def test_query_w_invalid_job_config(self): + from google.cloud.bigquery import QueryJobConfig, DatasetReference + from google.cloud.bigquery import job + + job_id = "some-job-id" + query = "select count(*) from persons" + creds = _make_credentials() + http = object() + default_job_config = QueryJobConfig() + default_job_config.default_dataset = DatasetReference( + self.PROJECT, "some-dataset" + ) + default_job_config.maximum_bytes_billed = 1000 + + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + default_query_job_config=default_job_config, + ) + + job_config = job.LoadJobConfig() + + with self.assertRaises(TypeError) as exc: + client.query( + query, job_id=job_id, 
location=self.LOCATION, job_config=job_config + ) + self.assertIn("Expected an instance of QueryJobConfig", exc.exception.args[0]) + def test_query_w_explicit_job_config_override(self): job_id = "some-job-id" query = "select count(*) from persons" @@ -3819,6 +3973,23 @@ def test_query_w_client_default_config_no_incoming(self): method="POST", path="/projects/PROJECT/jobs", data=resource ) + def test_query_w_invalid_default_job_config(self): + job_id = "some-job-id" + query = "select count(*) from persons" + creds = _make_credentials() + http = object() + default_job_config = object() + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + default_query_job_config=default_job_config, + ) + + with self.assertRaises(TypeError) as exc: + client.query(query, job_id=job_id, location=self.LOCATION) + self.assertIn("Expected an instance of QueryJobConfig", exc.exception.args[0]) + def test_query_w_client_location(self): job_id = "some-job-id" query = "select count(*) from persons" @@ -4005,7 +4176,7 @@ def test_insert_rows_w_schema(self): from google.cloud._helpers import UTC from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _microseconds_from_datetime - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField WHEN_TS = 1437767599.006 WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) @@ -4061,7 +4232,8 @@ def test_insert_rows_w_list_of_dictionaries(self): from google.cloud._helpers import UTC from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _microseconds_from_datetime - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table WHEN_TS = 1437767599.006 WHEN = datetime.datetime.utcfromtimestamp(WHEN_TS).replace(tzinfo=UTC) @@ -4122,8 +4294,8 @@ def _row_data(row): ) def 
test_insert_rows_w_list_of_Rows(self): + from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table - from google.cloud.bigquery.table import SchemaField from google.cloud.bigquery.table import Row PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( @@ -4167,7 +4339,8 @@ def _row_data(row): ) def test_insert_rows_w_skip_invalid_and_ignore_unknown(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( self.PROJECT, @@ -4243,7 +4416,8 @@ def _row_data(row): ) def test_insert_rows_w_repeated_fields(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( self.PROJECT, @@ -4336,7 +4510,7 @@ def test_insert_rows_w_repeated_fields(self): ) def test_insert_rows_w_record_schema(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField PATH = "projects/%s/datasets/%s/tables/%s/insertAll" % ( self.PROJECT, @@ -4404,6 +4578,40 @@ def test_insert_rows_w_record_schema(self): method="POST", path="/%s" % PATH, data=SENT ) + def test_insert_rows_w_explicit_none_insert_ids(self): + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table + + PATH = "projects/{}/datasets/{}/tables/{}/insertAll".format( + self.PROJECT, self.DS_ID, self.TABLE_ID, + ) + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + conn = client._connection = make_connection({}) + schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INTEGER", mode="REQUIRED"), + ] + table = Table(self.TABLE_REF, schema=schema) + ROWS = [ + 
{"full_name": "Phred Phlyntstone", "age": 32}, + {"full_name": "Bharney Rhubble", "age": 33}, + ] + + def _row_data(row): + row["age"] = str(row["age"]) + return row + + SENT = {"rows": [{"json": _row_data(row), "insertId": None} for row in ROWS]} + + errors = client.insert_rows(table, ROWS, row_ids=[None] * len(ROWS)) + + self.assertEqual(len(errors), 0) + conn.api_request.assert_called_once_with( + method="POST", path="/{}".format(PATH), data=SENT + ) + def test_insert_rows_errors(self): from google.cloud.bigquery.table import Table @@ -4431,6 +4639,7 @@ def test_insert_rows_errors(self): def test_insert_rows_w_numeric(self): from google.cloud.bigquery import table + from google.cloud.bigquery.schema import SchemaField project = "PROJECT" ds_id = "DS_ID" @@ -4440,10 +4649,7 @@ def test_insert_rows_w_numeric(self): client = self._make_one(project=project, credentials=creds, _http=http) conn = client._connection = make_connection({}) table_ref = DatasetReference(project, ds_id).table(table_id) - schema = [ - table.SchemaField("account", "STRING"), - table.SchemaField("balance", "NUMERIC"), - ] + schema = [SchemaField("account", "STRING"), SchemaField("balance", "NUMERIC")] insert_table = table.Table(table_ref, schema=schema) rows = [ ("Savings", decimal.Decimal("23.47")), @@ -4475,7 +4681,7 @@ def test_insert_rows_w_numeric(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( @@ -4551,7 +4757,7 @@ def test_insert_rows_from_dataframe(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_insert_rows_from_dataframe_many_columns(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import 
Table API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( @@ -4597,9 +4803,59 @@ def test_insert_rows_from_dataframe_many_columns(self): assert len(actual_calls) == 1 assert actual_calls[0] == expected_call + @unittest.skipIf(pandas is None, "Requires `pandas`") + def test_insert_rows_from_dataframe_w_explicit_none_insert_ids(self): + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table + + API_PATH = "/projects/{}/datasets/{}/tables/{}/insertAll".format( + self.PROJECT, self.DS_ID, self.TABLE_REF.table_id + ) + + dataframe = pandas.DataFrame( + [ + {"name": u"Little One", "adult": False}, + {"name": u"Young Gun", "adult": True}, + ] + ) + + # create client + creds = _make_credentials() + http = object() + client = self._make_one(project=self.PROJECT, credentials=creds, _http=http) + conn = client._connection = make_connection({}, {}) + + # create table + schema = [ + SchemaField("name", "STRING", mode="REQUIRED"), + SchemaField("adult", "BOOLEAN", mode="REQUIRED"), + ] + table = Table(self.TABLE_REF, schema=schema) + + error_info = client.insert_rows_from_dataframe( + table, dataframe, row_ids=[None] * len(dataframe) + ) + + self.assertEqual(len(error_info), 1) + assert error_info[0] == [] # no chunk errors + + EXPECTED_SENT_DATA = { + "rows": [ + {"insertId": None, "json": {"name": "Little One", "adult": "false"}}, + {"insertId": None, "json": {"name": "Young Gun", "adult": "true"}}, + ] + } + + actual_calls = conn.api_request.call_args_list + assert len(actual_calls) == 1 + assert actual_calls[0] == mock.call( + method="POST", path=API_PATH, data=EXPECTED_SENT_DATA + ) + def test_insert_rows_json(self): - from google.cloud.bigquery.table import Table, SchemaField from google.cloud.bigquery.dataset import DatasetReference + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table PROJECT = "PROJECT" DS_ID = "DS_ID" @@ -4665,6 +4921,27 @@ def 
test_insert_rows_json_with_string_id(self): data=expected, ) + def test_insert_rows_json_w_explicit_none_insert_ids(self): + rows = [{"col1": "val1"}, {"col2": "val2"}] + creds = _make_credentials() + http = object() + client = self._make_one( + project="default-project", credentials=creds, _http=http + ) + conn = client._connection = make_connection({}) + + errors = client.insert_rows_json( + "proj.dset.tbl", rows, row_ids=[None] * len(rows), + ) + + self.assertEqual(len(errors), 0) + expected = {"rows": [{"json": row, "insertId": None} for row in rows]} + conn.api_request.assert_called_once_with( + method="POST", + path="/projects/proj/datasets/dset/tables/tbl/insertAll", + data=expected, + ) + def test_list_partitions(self): from google.cloud.bigquery.table import Table @@ -4710,8 +4987,8 @@ def test_list_partitions_with_string_id(self): def test_list_rows(self): import datetime from google.cloud._helpers import UTC + from google.cloud.bigquery.schema import SchemaField from google.cloud.bigquery.table import Table - from google.cloud.bigquery.table import SchemaField from google.cloud.bigquery.table import Row PATH = "projects/%s/datasets/%s/tables/%s/data" % ( @@ -4811,7 +5088,8 @@ def test_list_rows_empty_table(self): self.assertEqual(rows.total_rows, 0) def test_list_rows_query_params(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table creds = _make_credentials() http = object() @@ -4833,7 +5111,7 @@ def test_list_rows_query_params(self): self.assertEqual(req[1]["query_params"], test[1], "for kwargs %s" % test[0]) def test_list_rows_repeated_fields(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField PATH = "projects/%s/datasets/%s/tables/%s/data" % ( self.PROJECT, @@ -4893,7 +5171,8 @@ def test_list_rows_repeated_fields(self): ) def test_list_rows_w_record_schema(self): - from 
google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table PATH = "projects/%s/datasets/%s/tables/%s/data" % ( self.PROJECT, @@ -5399,6 +5678,19 @@ def test_load_table_from_file_bad_mode(self): with pytest.raises(ValueError): client.load_table_from_file(file_obj, self.TABLE_REF) + def test_load_table_from_file_w_invalid_job_config(self): + from google.cloud.bigquery import job + + client = self._make_client() + gzip_file = self._make_gzip_file_obj(writable=True) + config = job.QueryJobConfig() + with pytest.raises(TypeError) as exc: + client.load_table_from_file( + gzip_file, self.TABLE_REF, job_id="job_id", job_config=config + ) + err_msg = str(exc.value) + assert "Expected an instance of LoadJobConfig" in err_msg + @unittest.skipIf(pandas is None, "Requires `pandas`") @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe(self): @@ -5704,8 +5996,7 @@ def test_load_table_from_dataframe_unknown_table(self): ) @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") - def test_load_table_from_dataframe_no_schema_warning(self): + def test_load_table_from_dataframe_no_schema_warning_wo_pyarrow(self): client = self._make_client() # Pick at least one column type that translates to Pandas dtype @@ -5722,9 +6013,12 @@ def test_load_table_from_dataframe_no_schema_warning(self): "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True ) pyarrow_patch = mock.patch("google.cloud.bigquery.client.pyarrow", None) + pyarrow_patch_helpers = mock.patch( + "google.cloud.bigquery._pandas_helpers.pyarrow", None + ) catch_warnings = warnings.catch_warnings(record=True) - with get_table_patch, load_patch, pyarrow_patch, catch_warnings as warned: + with get_table_patch, load_patch, pyarrow_patch, pyarrow_patch_helpers, catch_warnings as warned: client.load_table_from_dataframe( 
dataframe, self.TABLE_REF, location=self.LOCATION ) @@ -5892,7 +6186,6 @@ def test_load_table_from_dataframe_w_partial_schema_extra_types(self): assert "unknown_col" in message @unittest.skipIf(pandas is None, "Requires `pandas`") - @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_load_table_from_dataframe_w_partial_schema_missing_types(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job @@ -5909,10 +6202,13 @@ load_patch = mock.patch( "google.cloud.bigquery.client.Client.load_table_from_file", autospec=True ) + pyarrow_patch = mock.patch( + "google.cloud.bigquery._pandas_helpers.pyarrow", None + ) schema = (SchemaField("string_col", "STRING"),) job_config = job.LoadJobConfig(schema=schema) - with load_patch as load_table_from_file, warnings.catch_warnings( + with pyarrow_patch, load_patch as load_table_from_file, warnings.catch_warnings( record=True ) as warned: client.load_table_from_dataframe( @@ -6098,6 +6394,24 @@ def test_load_table_from_dataframe_w_nulls(self): assert sent_config.schema == schema assert sent_config.source_format == job.SourceFormat.PARQUET + @unittest.skipIf(pandas is None, "Requires `pandas`") + def test_load_table_from_dataframe_w_invalid_job_config(self): + from google.cloud.bigquery import job + + client = self._make_client() + + records = [{"float_column": 3.14, "struct_column": [{"foo": 1}, {"bar": -1}]}] + dataframe = pandas.DataFrame(data=records) + job_config = job.CopyJobConfig() + + with pytest.raises(TypeError) as exc: + client.load_table_from_dataframe( + dataframe, self.TABLE_REF, job_config=job_config, location=self.LOCATION + ) + + err_msg = str(exc.value) + assert "Expected an instance of LoadJobConfig" in err_msg + def test_load_table_from_json_basic_use(self): from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES from google.cloud.bigquery import job @@ -6186,6 +6500,26 @@ 
def test_load_table_from_json_non_default_args(self): # all properties should have been cloned and sent to the backend assert sent_config._properties.get("load", {}).get("unknown_field") == "foobar" + def test_load_table_from_json_w_invalid_job_config(self): + from google.cloud.bigquery import job + + client = self._make_client() + json_rows = [ + {"name": "One", "age": 11, "birthday": "2008-09-10", "adult": False}, + {"name": "Two", "age": 22, "birthday": "1997-08-09", "adult": True}, + ] + job_config = job.CopyJobConfig() + with pytest.raises(TypeError) as exc: + client.load_table_from_json( + json_rows, + self.TABLE_REF, + job_config=job_config, + project="project-x", + location="EU", + ) + err_msg = str(exc.value) + assert "Expected an instance of LoadJobConfig" in err_msg + # Low-level tests @classmethod diff --git a/bigquery/tests/unit/test_dataset.py b/bigquery/tests/unit/test_dataset.py index 26b1729a240c..ac13e00932ba 100644 --- a/bigquery/tests/unit/test_dataset.py +++ b/bigquery/tests/unit/test_dataset.py @@ -275,6 +275,7 @@ class TestDataset(unittest.TestCase): PROJECT = "project" DS_ID = "dataset-id" DS_REF = DatasetReference(PROJECT, DS_ID) + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" @staticmethod def _get_target_class(): @@ -314,6 +315,7 @@ def _make_resource(self): {"role": "WRITER", "specialGroup": "projectWriters"}, {"role": "READER", "specialGroup": "projectReaders"}, ], + "defaultEncryptionConfiguration": {"kmsKeyName": self.KMS_KEY_NAME}, } def _verify_access_entry(self, access_entries, resource): @@ -369,6 +371,13 @@ def _verify_resource_properties(self, dataset, resource): self.assertEqual(dataset.description, resource.get("description")) self.assertEqual(dataset.friendly_name, resource.get("friendlyName")) self.assertEqual(dataset.location, resource.get("location")) + if "defaultEncryptionConfiguration" in resource: + self.assertEqual( + dataset.default_encryption_configuration.kms_key_name, + 
resource.get("defaultEncryptionConfiguration")["kmsKeyName"], + ) + else: + self.assertIsNone(dataset.default_encryption_configuration) if "access" in resource: self._verify_access_entry(dataset.access_entries, resource) @@ -454,6 +463,14 @@ def test_access_entries_setter(self): dataset.access_entries = [phred, bharney] self.assertEqual(dataset.access_entries, [phred, bharney]) + def test_default_partition_expiration_ms(self): + dataset = self._make_one("proj.dset") + assert dataset.default_partition_expiration_ms is None + dataset.default_partition_expiration_ms = 12345 + assert dataset.default_partition_expiration_ms == 12345 + dataset.default_partition_expiration_ms = None + assert dataset.default_partition_expiration_ms is None + def test_default_table_expiration_ms_setter_bad_value(self): dataset = self._make_one(self.DS_REF) with self.assertRaises(ValueError): @@ -550,6 +567,22 @@ def test_to_api_repr_w_custom_field(self): } self.assertEqual(resource, exp_resource) + def test_default_encryption_configuration_setter(self): + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) + + dataset = self._make_one(self.DS_REF) + encryption_configuration = EncryptionConfiguration( + kms_key_name=self.KMS_KEY_NAME + ) + dataset.default_encryption_configuration = encryption_configuration + self.assertEqual( + dataset.default_encryption_configuration.kms_key_name, self.KMS_KEY_NAME + ) + dataset.default_encryption_configuration = None + self.assertIsNone(dataset.default_encryption_configuration) + def test_from_string(self): cls = self._get_target_class() got = cls.from_string("string-project.string_dataset") diff --git a/bigquery/tests/unit/test_encryption_configuration.py b/bigquery/tests/unit/test_encryption_configuration.py new file mode 100644 index 000000000000..f432a903b4cc --- /dev/null +++ b/bigquery/tests/unit/test_encryption_configuration.py @@ -0,0 +1,111 @@ +# Copyright 2015 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest +import mock + + +class TestEncryptionConfiguration(unittest.TestCase): + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" + + @staticmethod + def _get_target_class(): + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) + + return EncryptionConfiguration + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_ctor_defaults(self): + encryption_config = self._make_one() + self.assertIsNone(encryption_config.kms_key_name) + + def test_ctor_with_key(self): + encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) + self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) + + def test_kms_key_name_setter(self): + encryption_config = self._make_one() + self.assertIsNone(encryption_config.kms_key_name) + encryption_config.kms_key_name = self.KMS_KEY_NAME + self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) + encryption_config.kms_key_name = None + self.assertIsNone(encryption_config.kms_key_name) + + def test_from_api_repr(self): + RESOURCE = {"kmsKeyName": self.KMS_KEY_NAME} + klass = self._get_target_class() + encryption_config = klass.from_api_repr(RESOURCE) + self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) + + def test_to_api_repr(self): + encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) + resource = encryption_config.to_api_repr() + self.assertEqual(resource, 
{"kmsKeyName": self.KMS_KEY_NAME}) + + def test___eq___wrong_type(self): + encryption_config = self._make_one() + other = object() + self.assertNotEqual(encryption_config, other) + self.assertEqual(encryption_config, mock.ANY) + + def test___eq___kms_key_name_mismatch(self): + encryption_config = self._make_one() + other = self._make_one(self.KMS_KEY_NAME) + self.assertNotEqual(encryption_config, other) + + def test___eq___hit(self): + encryption_config = self._make_one(self.KMS_KEY_NAME) + other = self._make_one(self.KMS_KEY_NAME) + self.assertEqual(encryption_config, other) + + def test___ne___wrong_type(self): + encryption_config = self._make_one() + other = object() + self.assertNotEqual(encryption_config, other) + self.assertEqual(encryption_config, mock.ANY) + + def test___ne___same_value(self): + encryption_config1 = self._make_one(self.KMS_KEY_NAME) + encryption_config2 = self._make_one(self.KMS_KEY_NAME) + # unittest ``assertEqual`` uses ``==`` not ``!=``. + comparison_val = encryption_config1 != encryption_config2 + self.assertFalse(comparison_val) + + def test___ne___different_values(self): + encryption_config1 = self._make_one() + encryption_config2 = self._make_one(self.KMS_KEY_NAME) + self.assertNotEqual(encryption_config1, encryption_config2) + + def test___hash__set_equality(self): + encryption_config1 = self._make_one(self.KMS_KEY_NAME) + encryption_config2 = self._make_one(self.KMS_KEY_NAME) + set_one = {encryption_config1, encryption_config2} + set_two = {encryption_config1, encryption_config2} + self.assertEqual(set_one, set_two) + + def test___hash__not_equals(self): + encryption_config1 = self._make_one() + encryption_config2 = self._make_one(self.KMS_KEY_NAME) + set_one = {encryption_config1} + set_two = {encryption_config2} + self.assertNotEqual(set_one, set_two) + + def test___repr__(self): + encryption_config = self._make_one(self.KMS_KEY_NAME) + expected = "EncryptionConfiguration({})".format(self.KMS_KEY_NAME) + 
self.assertEqual(repr(encryption_config), expected) diff --git a/bigquery/tests/unit/test_external_config.py b/bigquery/tests/unit/test_external_config.py index ddf95e317969..dab4391cbe04 100644 --- a/bigquery/tests/unit/test_external_config.py +++ b/bigquery/tests/unit/test_external_config.py @@ -130,7 +130,10 @@ def test_from_api_repr_sheets(self): self.BASE_RESOURCE, { "sourceFormat": "GOOGLE_SHEETS", - "googleSheetsOptions": {"skipLeadingRows": "123"}, + "googleSheetsOptions": { + "skipLeadingRows": "123", + "range": "Sheet1!A5:B10", + }, }, ) @@ -140,14 +143,17 @@ def test_from_api_repr_sheets(self): self.assertEqual(ec.source_format, "GOOGLE_SHEETS") self.assertIsInstance(ec.options, external_config.GoogleSheetsOptions) self.assertEqual(ec.options.skip_leading_rows, 123) + self.assertEqual(ec.options.range, "Sheet1!A5:B10") got_resource = ec.to_api_repr() self.assertEqual(got_resource, resource) del resource["googleSheetsOptions"]["skipLeadingRows"] + del resource["googleSheetsOptions"]["range"] ec = external_config.ExternalConfig.from_api_repr(resource) self.assertIsNone(ec.options.skip_leading_rows) + self.assertIsNone(ec.options.range) got_resource = ec.to_api_repr() self.assertEqual(got_resource, resource) @@ -155,11 +161,12 @@ def test_to_api_repr_sheets(self): ec = external_config.ExternalConfig("GOOGLE_SHEETS") options = external_config.GoogleSheetsOptions() options.skip_leading_rows = 123 + options.range = "Sheet1!A5:B10" ec._options = options exp_resource = { "sourceFormat": "GOOGLE_SHEETS", - "googleSheetsOptions": {"skipLeadingRows": "123"}, + "googleSheetsOptions": {"skipLeadingRows": "123", "range": "Sheet1!A5:B10"}, } got_resource = ec.to_api_repr() diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index b34184f00cd9..a2aeb5efbc4a 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -268,6 +268,53 @@ def test_job_type(self): self.assertEqual(derived.job_type, "derived") + def 
test_parent_job_id(self): + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, client) + + self.assertIsNone(job.parent_job_id) + job._properties["statistics"] = {"parentJobId": "parent-job-123"} + self.assertEqual(job.parent_job_id, "parent-job-123") + + def test_script_statistics(self): + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, client) + + self.assertIsNone(job.script_statistics) + job._properties["statistics"] = { + "scriptStatistics": { + "evaluationKind": "EXPRESSION", + "stackFrames": [ + { + "startLine": 5, + "startColumn": 29, + "endLine": 9, + "endColumn": 14, + "text": "QUERY TEXT", + } + ], + } + } + script_stats = job.script_statistics + self.assertEqual(script_stats.evaluation_kind, "EXPRESSION") + stack_frames = script_stats.stack_frames + self.assertEqual(len(stack_frames), 1) + stack_frame = stack_frames[0] + self.assertIsNone(stack_frame.procedure_id) + self.assertEqual(stack_frame.start_line, 5) + self.assertEqual(stack_frame.start_column, 29) + self.assertEqual(stack_frame.end_line, 9) + self.assertEqual(stack_frame.end_column, 14) + self.assertEqual(stack_frame.text, "QUERY TEXT") + + def test_num_child_jobs(self): + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, client) + + self.assertEqual(job.num_child_jobs, 0) + job._properties["statistics"] = {"numChildJobs": "17"} + self.assertEqual(job.num_child_jobs, 17) + def test_labels_miss(self): client = _make_client(project=self.PROJECT) job = self._make_one(self.JOB_ID, client) @@ -796,7 +843,7 @@ def test__set_future_result_w_done_wo_result_set_w_error(self): set_exception.assert_called_once() args, kw = set_exception.call_args - exception, = args + (exception,) = args self.assertIsInstance(exception, NotFound) self.assertEqual(exception.message, "testing") self.assertEqual(kw, {}) @@ -1030,7 +1077,7 @@ class _Base(object): TABLE_ID = "table_id" TABLE_REF = TableReference(DS_REF, TABLE_ID) JOB_ID 
= "JOB_ID" - KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) @@ -1229,7 +1276,9 @@ def test_destination_encryption_configuration_missing(self): self.assertIsNone(config.destination_encryption_configuration) def test_destination_encryption_configuration_hit(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) kms_key_name = "kms-key-name" encryption_configuration = EncryptionConfiguration(kms_key_name) @@ -1242,7 +1291,9 @@ def test_destination_encryption_configuration_hit(self): ) def test_destination_encryption_configuration_setter(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) kms_key_name = "kms-key-name" encryption_configuration = EncryptionConfiguration(kms_key_name) @@ -1481,7 +1532,7 @@ def test_schema_hit(self): self.assertEqual(all_props, SchemaField.from_api_repr(all_props_repr)) self.assertEqual(minimal, SchemaField.from_api_repr(minimal_repr)) - def test_schema_setter(self): + def test_schema_setter_fields(self): from google.cloud.bigquery.schema import SchemaField config = self._get_target_class()() @@ -1504,6 +1555,42 @@ def test_schema_setter(self): config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]} ) + def test_schema_setter_valid_mappings_list(self): + config = self._get_target_class()() + + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}, + ] + config.schema = schema + + full_name_repr = { + "name": "full_name", + "type": "STRING", + "mode": "REQUIRED", + "description": None, + } + age_repr = { + "name": "age", + "type": "INTEGER", + "mode": 
"REQUIRED", + "description": None, + } + self.assertEqual( + config._properties["load"]["schema"], {"fields": [full_name_repr, age_repr]} + ) + + def test_schema_setter_invalid_mappings_list(self): + config = self._get_target_class()() + + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "typeoo": "INTEGER", "mode": "REQUIRED"}, + ] + + with self.assertRaises(Exception): + config.schema = schema + def test_schema_setter_unsetting_schema(self): from google.cloud.bigquery.schema import SchemaField @@ -1587,6 +1674,44 @@ def test_source_format_setter(self): config.source_format = source_format self.assertEqual(config._properties["load"]["sourceFormat"], source_format) + def test_range_partitioning_w_none(self): + object_under_test = self._get_target_class()() + assert object_under_test.range_partitioning is None + + def test_range_partitioning_w_value(self): + object_under_test = self._get_target_class()() + object_under_test._properties["load"]["rangePartitioning"] = { + "field": "column_one", + "range": {"start": 1, "end": 1000, "interval": 10}, + } + object_under_test.range_partitioning.field == "column_one" + object_under_test.range_partitioning.range_.start == 1 + object_under_test.range_partitioning.range_.end == 1000 + object_under_test.range_partitioning.range_.interval == 10 + + def test_range_partitioning_setter(self): + from google.cloud.bigquery.table import PartitionRange + from google.cloud.bigquery.table import RangePartitioning + + object_under_test = self._get_target_class()() + object_under_test.range_partitioning = RangePartitioning( + field="column_one", range_=PartitionRange(start=1, end=1000, interval=10) + ) + object_under_test.range_partitioning.field == "column_one" + object_under_test.range_partitioning.range_.start == 1 + object_under_test.range_partitioning.range_.end == 1000 + object_under_test.range_partitioning.range_.interval == 10 + + def test_range_partitioning_setter_w_none(self): + 
object_under_test = self._get_target_class()() + object_under_test.range_partitioning = None + assert object_under_test.range_partitioning is None + + def test_range_partitioning_setter_w_wrong_type(self): + object_under_test = self._get_target_class()() + with pytest.raises(ValueError, match="RangePartitioning"): + object_under_test.range_partitioning = object() + def test_time_partitioning_miss(self): config = self._get_target_class()() self.assertIsNone(config.time_partitioning) @@ -1841,6 +1966,7 @@ def test_ctor(self): self.assertIsNone(job.destination_encryption_configuration) self.assertIsNone(job.destination_table_description) self.assertIsNone(job.destination_table_friendly_name) + self.assertIsNone(job.range_partitioning) self.assertIsNone(job.time_partitioning) self.assertIsNone(job.use_avro_logical_types) self.assertIsNone(job.clustering_fields) @@ -2439,7 +2565,9 @@ def test_ctor_w_properties(self): self.assertEqual(config.write_disposition, write_disposition) def test_to_api_repr_with_encryption(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) config = self._make_one() config.destination_encryption_configuration = EncryptionConfiguration( @@ -3275,6 +3403,44 @@ def test_destinaton_w_string(self): expected = table.TableReference.from_string(destination) self.assertEqual(config.destination, expected) + def test_range_partitioning_w_none(self): + object_under_test = self._get_target_class()() + assert object_under_test.range_partitioning is None + + def test_range_partitioning_w_value(self): + object_under_test = self._get_target_class()() + object_under_test._properties["query"]["rangePartitioning"] = { + "field": "column_one", + "range": {"start": 1, "end": 1000, "interval": 10}, + } + object_under_test.range_partitioning.field == "column_one" + object_under_test.range_partitioning.range_.start == 1 + 
object_under_test.range_partitioning.range_.end == 1000 + object_under_test.range_partitioning.range_.interval == 10 + + def test_range_partitioning_setter(self): + from google.cloud.bigquery.table import PartitionRange + from google.cloud.bigquery.table import RangePartitioning + + object_under_test = self._get_target_class()() + object_under_test.range_partitioning = RangePartitioning( + field="column_one", range_=PartitionRange(start=1, end=1000, interval=10) + ) + object_under_test.range_partitioning.field == "column_one" + object_under_test.range_partitioning.range_.start == 1 + object_under_test.range_partitioning.range_.end == 1000 + object_under_test.range_partitioning.range_.interval == 10 + + def test_range_partitioning_setter_w_none(self): + object_under_test = self._get_target_class()() + object_under_test.range_partitioning = None + assert object_under_test.range_partitioning is None + + def test_range_partitioning_setter_w_wrong_type(self): + object_under_test = self._get_target_class()() + with pytest.raises(ValueError, match="RangePartitioning"): + object_under_test.range_partitioning = object() + def test_time_partitioning(self): from google.cloud.bigquery import table @@ -3364,7 +3530,9 @@ def test_to_api_repr_normal(self): self.assertEqual(resource["someNewProperty"], "Woohoo, alpha stuff.") def test_to_api_repr_with_encryption(self): - from google.cloud.bigquery.table import EncryptionConfiguration + from google.cloud.bigquery.encryption_configuration import ( + EncryptionConfiguration, + ) config = self._make_one() config.destination_encryption_configuration = EncryptionConfiguration( @@ -3573,6 +3741,7 @@ def test_ctor_defaults(self): self.assertIsNone(job.maximum_bytes_billed) self.assertIsNone(job.table_definitions) self.assertIsNone(job.destination_encryption_configuration) + self.assertIsNone(job.range_partitioning) self.assertIsNone(job.time_partitioning) self.assertIsNone(job.clustering_fields) 
self.assertIsNone(job.schema_update_options) @@ -4337,8 +4506,10 @@ def test_result_error(self): self.assertIsInstance(exc_info.exception, exceptions.GoogleCloudError) self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST) - full_text = str(exc_info.exception) + exc_job_instance = getattr(exc_info.exception, "query_job", None) + self.assertIs(exc_job_instance, job) + full_text = str(exc_info.exception) assert job.job_id in full_text assert "Query Job SQL Follows" in full_text @@ -4370,8 +4541,10 @@ def test__begin_error(self): self.assertIsInstance(exc_info.exception, exceptions.GoogleCloudError) self.assertEqual(exc_info.exception.code, http_client.BAD_REQUEST) - full_text = str(exc_info.exception) + exc_job_instance = getattr(exc_info.exception, "query_job", None) + self.assertIs(exc_job_instance, job) + full_text = str(exc_info.exception) assert job.job_id in full_text assert "Query Job SQL Follows" in full_text @@ -5319,6 +5492,92 @@ def test_end(self): self.assertEqual(entry.end.strftime(_RFC3339_MICROS), self.END_RFC3339_MICROS) +class TestScriptStackFrame(unittest.TestCase, _Base): + def _make_one(self, resource): + from google.cloud.bigquery.job import ScriptStackFrame + + return ScriptStackFrame(resource) + + def test_procedure_id(self): + frame = self._make_one({"procedureId": "some-procedure"}) + self.assertEqual(frame.procedure_id, "some-procedure") + del frame._properties["procedureId"] + self.assertIsNone(frame.procedure_id) + + def test_start_line(self): + frame = self._make_one({"startLine": 5}) + self.assertEqual(frame.start_line, 5) + frame._properties["startLine"] = "5" + self.assertEqual(frame.start_line, 5) + + def test_start_column(self): + frame = self._make_one({"startColumn": 29}) + self.assertEqual(frame.start_column, 29) + frame._properties["startColumn"] = "29" + self.assertEqual(frame.start_column, 29) + + def test_end_line(self): + frame = self._make_one({"endLine": 9}) + self.assertEqual(frame.end_line, 9) + 
frame._properties["endLine"] = "9" + self.assertEqual(frame.end_line, 9) + + def test_end_column(self): + frame = self._make_one({"endColumn": 14}) + self.assertEqual(frame.end_column, 14) + frame._properties["endColumn"] = "14" + self.assertEqual(frame.end_column, 14) + + def test_text(self): + frame = self._make_one({"text": "QUERY TEXT"}) + self.assertEqual(frame.text, "QUERY TEXT") + + +class TestScriptStatistics(unittest.TestCase, _Base): + def _make_one(self, resource): + from google.cloud.bigquery.job import ScriptStatistics + + return ScriptStatistics(resource) + + def test_evalutation_kind(self): + stats = self._make_one({"evaluationKind": "EXPRESSION"}) + self.assertEqual(stats.evaluation_kind, "EXPRESSION") + self.assertEqual(stats.stack_frames, []) + + def test_stack_frames(self): + stats = self._make_one( + { + "stackFrames": [ + { + "procedureId": "some-procedure", + "startLine": 5, + "startColumn": 29, + "endLine": 9, + "endColumn": 14, + "text": "QUERY TEXT", + }, + {}, + ] + } + ) + stack_frames = stats.stack_frames + self.assertEqual(len(stack_frames), 2) + stack_frame = stack_frames[0] + self.assertEqual(stack_frame.procedure_id, "some-procedure") + self.assertEqual(stack_frame.start_line, 5) + self.assertEqual(stack_frame.start_column, 29) + self.assertEqual(stack_frame.end_line, 9) + self.assertEqual(stack_frame.end_column, 14) + self.assertEqual(stack_frame.text, "QUERY TEXT") + stack_frame = stack_frames[1] + self.assertIsNone(stack_frame.procedure_id) + self.assertIsNone(stack_frame.start_line) + self.assertIsNone(stack_frame.start_column) + self.assertIsNone(stack_frame.end_line) + self.assertIsNone(stack_frame.end_column) + self.assertIsNone(stack_frame.text) + + class TestTimelineEntry(unittest.TestCase, _Base): ELAPSED_MS = 101 ACTIVE_UNITS = 50 diff --git a/bigquery/tests/unit/test_magics.py b/bigquery/tests/unit/test_magics.py index ec642ff384e1..ed253636c468 100644 --- a/bigquery/tests/unit/test_magics.py +++ 
b/bigquery/tests/unit/test_magics.py @@ -902,6 +902,37 @@ def test_bigquery_magic_dryrun_option_saves_query_job_to_variable(): assert isinstance(q_job, job.QueryJob) +@pytest.mark.usefixtures("ipython_interactive") +def test_bigquery_magic_saves_query_job_to_variable_on_error(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension("google.cloud.bigquery") + magics.context.credentials = mock.create_autospec( + google.auth.credentials.Credentials, instance=True + ) + + client_query_patch = mock.patch( + "google.cloud.bigquery.client.Client.query", autospec=True + ) + + query_job = mock.create_autospec(job.QueryJob, instance=True) + exception = Exception("Unexpected SELECT") + exception.query_job = query_job + query_job.result.side_effect = exception + + sql = "SELECT SELECT 17 AS num" + + assert "result" not in ip.user_ns + + with client_query_patch as client_query_mock: + client_query_mock.return_value = query_job + return_value = ip.run_cell_magic("bigquery", "result", sql) + + assert return_value is None + assert "result" in ip.user_ns + result = ip.user_ns["result"] + assert isinstance(result, job.QueryJob) + + @pytest.mark.usefixtures("ipython_interactive") def test_bigquery_magic_w_maximum_bytes_billed_invalid(): ip = IPython.get_ipython() diff --git a/bigquery/tests/unit/test_schema.py b/bigquery/tests/unit/test_schema.py index 862d8a823e62..fc8a41c68c46 100644 --- a/bigquery/tests/unit/test_schema.py +++ b/bigquery/tests/unit/test_schema.py @@ -568,3 +568,69 @@ def test_w_subfields(self): ], }, ) + + +class Test_to_schema_fields(unittest.TestCase): + @staticmethod + def _call_fut(schema): + from google.cloud.bigquery.schema import _to_schema_fields + + return _to_schema_fields(schema) + + def test_invalid_type(self): + schema = [ + ("full_name", "STRING", "REQUIRED"), + ("address", "STRING", "REQUIRED"), + ] + with self.assertRaises(ValueError): + self._call_fut(schema) + + def test_schema_fields_sequence(self): + from 
google.cloud.bigquery.schema import SchemaField + + schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INT64", mode="NULLABLE"), + ] + result = self._call_fut(schema) + self.assertEqual(result, schema) + + def test_invalid_mapping_representation(self): + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "address", "typeooo": "STRING", "mode": "REQUIRED"}, + ] + with self.assertRaises(Exception): + self._call_fut(schema) + + def test_valid_mapping_representation(self): + from google.cloud.bigquery.schema import SchemaField + + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + { + "name": "residence", + "type": "STRUCT", + "mode": "NULLABLE", + "fields": [ + {"name": "foo", "type": "DATE", "mode": "NULLABLE"}, + {"name": "bar", "type": "BYTES", "mode": "REQUIRED"}, + ], + }, + ] + + expected_schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField( + "residence", + "STRUCT", + mode="NULLABLE", + fields=[ + SchemaField("foo", "DATE", mode="NULLABLE"), + SchemaField("bar", "BYTES", mode="REQUIRED"), + ], + ), + ] + + result = self._call_fut(schema) + self.assertEqual(result, expected_schema) diff --git a/bigquery/tests/unit/test_table.py b/bigquery/tests/unit/test_table.py index 562bcf6b4e7d..97a7b4ae745e 100644 --- a/bigquery/tests/unit/test_table.py +++ b/bigquery/tests/unit/test_table.py @@ -71,7 +71,7 @@ def _verifySchema(self, schema, resource): class TestEncryptionConfiguration(unittest.TestCase): - KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" @staticmethod def _get_target_class(): @@ -90,78 +90,6 @@ def test_ctor_with_key(self): encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) - def test_kms_key_name_setter(self): - encryption_config = self._make_one() - 
self.assertIsNone(encryption_config.kms_key_name) - encryption_config.kms_key_name = self.KMS_KEY_NAME - self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) - encryption_config.kms_key_name = None - self.assertIsNone(encryption_config.kms_key_name) - - def test_from_api_repr(self): - RESOURCE = {"kmsKeyName": self.KMS_KEY_NAME} - klass = self._get_target_class() - encryption_config = klass.from_api_repr(RESOURCE) - self.assertEqual(encryption_config.kms_key_name, self.KMS_KEY_NAME) - - def test_to_api_repr(self): - encryption_config = self._make_one(kms_key_name=self.KMS_KEY_NAME) - resource = encryption_config.to_api_repr() - self.assertEqual(resource, {"kmsKeyName": self.KMS_KEY_NAME}) - - def test___eq___wrong_type(self): - encryption_config = self._make_one() - other = object() - self.assertNotEqual(encryption_config, other) - self.assertEqual(encryption_config, mock.ANY) - - def test___eq___kms_key_name_mismatch(self): - encryption_config = self._make_one() - other = self._make_one(self.KMS_KEY_NAME) - self.assertNotEqual(encryption_config, other) - - def test___eq___hit(self): - encryption_config = self._make_one(self.KMS_KEY_NAME) - other = self._make_one(self.KMS_KEY_NAME) - self.assertEqual(encryption_config, other) - - def test___ne___wrong_type(self): - encryption_config = self._make_one() - other = object() - self.assertNotEqual(encryption_config, other) - self.assertEqual(encryption_config, mock.ANY) - - def test___ne___same_value(self): - encryption_config1 = self._make_one(self.KMS_KEY_NAME) - encryption_config2 = self._make_one(self.KMS_KEY_NAME) - # unittest ``assertEqual`` uses ``==`` not ``!=``. 
- comparison_val = encryption_config1 != encryption_config2 - self.assertFalse(comparison_val) - - def test___ne___different_values(self): - encryption_config1 = self._make_one() - encryption_config2 = self._make_one(self.KMS_KEY_NAME) - self.assertNotEqual(encryption_config1, encryption_config2) - - def test___hash__set_equality(self): - encryption_config1 = self._make_one(self.KMS_KEY_NAME) - encryption_config2 = self._make_one(self.KMS_KEY_NAME) - set_one = {encryption_config1, encryption_config2} - set_two = {encryption_config1, encryption_config2} - self.assertEqual(set_one, set_two) - - def test___hash__not_equals(self): - encryption_config1 = self._make_one() - encryption_config2 = self._make_one(self.KMS_KEY_NAME) - set_one = {encryption_config1} - set_two = {encryption_config2} - self.assertNotEqual(set_one, set_two) - - def test___repr__(self): - encryption_config = self._make_one(self.KMS_KEY_NAME) - expected = "EncryptionConfiguration({})".format(self.KMS_KEY_NAME) - self.assertEqual(repr(encryption_config), expected) - class TestTableReference(unittest.TestCase): @staticmethod @@ -339,7 +267,7 @@ class TestTable(unittest.TestCase, _SchemaBase): PROJECT = "prahj-ekt" DS_ID = "dataset-name" TABLE_NAME = "table-name" - KMS_KEY_NAME = "projects/1/locations/global/keyRings/1/cryptoKeys/1" + KMS_KEY_NAME = "projects/1/locations/us/keyRings/1/cryptoKeys/1" @staticmethod def _get_target_class(): @@ -522,7 +450,7 @@ def test_ctor(self): self.assertIsNone(table.clustering_fields) def test_ctor_w_schema(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) @@ -628,7 +556,7 @@ def test_num_rows_getter(self): with self.assertRaises(ValueError): getattr(table, "num_rows") - def test_schema_setter_non_list(self): + def test_schema_setter_non_sequence(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) 
table_ref = dataset.table(self.TABLE_NAME) table = self._make_one(table_ref) @@ -636,7 +564,7 @@ def test_schema_setter_non_list(self): table.schema = object() def test_schema_setter_invalid_field(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) @@ -645,8 +573,8 @@ def test_schema_setter_invalid_field(self): with self.assertRaises(ValueError): table.schema = [full_name, object()] - def test_schema_setter(self): - from google.cloud.bigquery.table import SchemaField + def test_schema_setter_valid_fields(self): + from google.cloud.bigquery.schema import SchemaField dataset = DatasetReference(self.PROJECT, self.DS_ID) table_ref = dataset.table(self.TABLE_NAME) @@ -656,6 +584,48 @@ def test_schema_setter(self): table.schema = [full_name, age] self.assertEqual(table.schema, [full_name, age]) + def test_schema_setter_invalid_mapping_representation(self): + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref) + full_name = {"name": "full_name", "type": "STRING", "mode": "REQUIRED"} + invalid_field = {"name": "full_name", "typeooo": "STRING", "mode": "REQUIRED"} + with self.assertRaises(Exception): + table.schema = [full_name, invalid_field] + + def test_schema_setter_valid_mapping_representation(self): + from google.cloud.bigquery.schema import SchemaField + + dataset = DatasetReference(self.PROJECT, self.DS_ID) + table_ref = dataset.table(self.TABLE_NAME) + table = self._make_one(table_ref) + full_name = {"name": "full_name", "type": "STRING", "mode": "REQUIRED"} + job_status = { + "name": "is_employed", + "type": "STRUCT", + "mode": "NULLABLE", + "fields": [ + {"name": "foo", "type": "DATE", "mode": "NULLABLE"}, + {"name": "bar", "type": "BYTES", "mode": "REQUIRED"}, + ], + } + + table.schema = [full_name, job_status] + + 
expected_schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField( + "is_employed", + "STRUCT", + mode="NULLABLE", + fields=[ + SchemaField("foo", "DATE", mode="NULLABLE"), + SchemaField("bar", "BYTES", mode="REQUIRED"), + ], + ), + ] + self.assertEqual(table.schema, expected_schema) + def test_props_set_by_server(self): import datetime from google.cloud._helpers import UTC @@ -928,6 +898,40 @@ def test__build_resource_w_custom_field_not_in__properties(self): with self.assertRaises(ValueError): table._build_resource(["bad"]) + def test_range_partitioning(self): + from google.cloud.bigquery.table import RangePartitioning + from google.cloud.bigquery.table import PartitionRange + + table = self._make_one("proj.dset.tbl") + assert table.range_partitioning is None + + table.range_partitioning = RangePartitioning( + field="col1", range_=PartitionRange(start=-512, end=1024, interval=128) + ) + assert table.range_partitioning.field == "col1" + assert table.range_partitioning.range_.start == -512 + assert table.range_partitioning.range_.end == 1024 + assert table.range_partitioning.range_.interval == 128 + + table.range_partitioning = None + assert table.range_partitioning is None + + def test_range_partitioning_w_wrong_type(self): + object_under_test = self._make_one("proj.dset.tbl") + with pytest.raises(ValueError, match="RangePartitioning"): + object_under_test.range_partitioning = object() + + def test_require_partitioning_filter(self): + table = self._make_one("proj.dset.tbl") + assert table.require_partition_filter is None + table.require_partition_filter = True + assert table.require_partition_filter + table.require_partition_filter = False + assert table.require_partition_filter is not None + assert not table.require_partition_filter + table.require_partition_filter = None + assert table.require_partition_filter is None + def test_time_partitioning_getter(self): from google.cloud.bigquery.table import TimePartitioning from 
google.cloud.bigquery.table import TimePartitioningType @@ -946,7 +950,12 @@ def test_time_partitioning_getter(self): self.assertEqual(table.time_partitioning.type_, TimePartitioningType.DAY) self.assertEqual(table.time_partitioning.field, "col1") self.assertEqual(table.time_partitioning.expiration_ms, 123456) - self.assertFalse(table.time_partitioning.require_partition_filter) + + with warnings.catch_warnings(record=True) as warned: + self.assertFalse(table.time_partitioning.require_partition_filter) + + assert len(warned) == 1 + self.assertIs(warned[0].category, PendingDeprecationWarning) def test_time_partitioning_getter_w_none(self): dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -974,7 +983,12 @@ def test_time_partitioning_getter_w_empty(self): self.assertIsNone(table.time_partitioning.type_) self.assertIsNone(table.time_partitioning.field) self.assertIsNone(table.time_partitioning.expiration_ms) - self.assertIsNone(table.time_partitioning.require_partition_filter) + + with warnings.catch_warnings(record=True) as warned: + self.assertIsNone(table.time_partitioning.require_partition_filter) + + for warning in warned: + self.assertIs(warning.category, PendingDeprecationWarning) def test_time_partitioning_setter(self): from google.cloud.bigquery.table import TimePartitioning @@ -1118,6 +1132,10 @@ def test_clustering_fields_setter_w_none_noop(self): self.assertFalse("clustering" in table._properties) def test_encryption_configuration_setter(self): + # Previously, the EncryptionConfiguration class was in the table module, not the + # encryption_configuration module. It was moved to support models encryption. + # This test import from the table module to ensure that the previous location + # continues to function as an alias. 
from google.cloud.bigquery.table import EncryptionConfiguration dataset = DatasetReference(self.PROJECT, self.DS_ID) @@ -1169,7 +1187,8 @@ def test__row_from_mapping_wo_schema(self): self.assertEqual(exc.exception.args, (_TABLE_HAS_NO_SCHEMA,)) def test__row_from_mapping_w_invalid_schema(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table MAPPING = { "full_name": "Phred Phlyntstone", @@ -1191,7 +1210,8 @@ def test__row_from_mapping_w_invalid_schema(self): self.assertIn("Unknown field mode: BOGUS", str(exc.exception)) def test__row_from_mapping_w_schema(self): - from google.cloud.bigquery.table import Table, SchemaField + from google.cloud.bigquery.schema import SchemaField + from google.cloud.bigquery.table import Table MAPPING = { "full_name": "Phred Phlyntstone", @@ -1521,8 +1541,24 @@ def test_constructor_with_table(self): self.assertIs(iterator._table, table) self.assertEqual(iterator.total_rows, 100) + def test_constructor_with_dict_schema(self): + from google.cloud.bigquery.schema import SchemaField + + schema = [ + {"name": "full_name", "type": "STRING", "mode": "REQUIRED"}, + {"name": "age", "type": "INT64", "mode": "NULLABLE"}, + ] + + iterator = self._make_one(schema=schema) + + expected_schema = [ + SchemaField("full_name", "STRING", mode="REQUIRED"), + SchemaField("age", "INT64", mode="NULLABLE"), + ] + self.assertEqual(iterator.schema, expected_schema) + def test_iterate(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1553,7 +1589,7 @@ def test_iterate(self): api_request.assert_called_once_with(method="GET", path=path, query_params={}) def test_page_size(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", 
"STRING", mode="REQUIRED"), @@ -1579,7 +1615,7 @@ def test_page_size(self): @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1661,7 +1697,7 @@ def test_to_arrow(self): @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_nulls(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")] rows = [ @@ -1694,7 +1730,7 @@ def test_to_arrow_w_nulls(self): @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_unknown_type(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1732,7 +1768,7 @@ def test_to_arrow_w_unknown_type(self): @unittest.skipIf(pyarrow is None, "Requires `pyarrow`") def test_to_arrow_w_empty_table(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1894,7 +1930,7 @@ def test_to_arrow_w_bqstorage_no_streams(self): @mock.patch("tqdm.tqdm_notebook") @mock.patch("tqdm.tqdm") def test_to_arrow_progress_bar(self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1937,7 +1973,7 @@ def test_to_arrow_w_pyarrow_none(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -1969,7 
+2005,7 @@ def test_to_dataframe(self): def test_to_dataframe_progress_bar( self, tqdm_mock, tqdm_notebook_mock, tqdm_gui_mock ): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2002,7 +2038,7 @@ def test_to_dataframe_progress_bar( @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery.table.tqdm", new=None) def test_to_dataframe_no_tqdm_no_progress_bar(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2027,7 +2063,7 @@ def test_to_dataframe_no_tqdm_no_progress_bar(self): @unittest.skipIf(pandas is None, "Requires `pandas`") @mock.patch("google.cloud.bigquery.table.tqdm", new=None) def test_to_dataframe_no_tqdm(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2060,7 +2096,7 @@ def test_to_dataframe_no_tqdm(self): @mock.patch("tqdm.tqdm_notebook", new=None) # will raise TypeError on call @mock.patch("tqdm.tqdm", new=None) # will raise TypeError on call def test_to_dataframe_tqdm_error(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2090,7 +2126,7 @@ def test_to_dataframe_tqdm_error(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_empty_results(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2125,7 +2161,7 @@ def test_to_dataframe_logs_tabledata_list(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_w_various_types_nullable(self): 
import datetime - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("start_timestamp", "TIMESTAMP"), @@ -2165,7 +2201,7 @@ def test_to_dataframe_w_various_types_nullable(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_column_dtypes(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("start_timestamp", "TIMESTAMP"), @@ -2203,7 +2239,7 @@ def test_to_dataframe_column_dtypes(self): @mock.patch("google.cloud.bigquery.table.pandas", new=None) def test_to_dataframe_error_if_pandas_is_none(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2222,7 +2258,7 @@ def test_to_dataframe_error_if_pandas_is_none(self): @unittest.skipIf(pandas is None, "Requires `pandas`") def test_to_dataframe_max_results_w_bqstorage_warning(self): - from google.cloud.bigquery.table import SchemaField + from google.cloud.bigquery.schema import SchemaField schema = [ SchemaField("name", "STRING", mode="REQUIRED"), @@ -2824,6 +2860,96 @@ def test_to_dataframe_w_bqstorage_snapshot(self): row_iterator.to_dataframe(bqstorage_client) +class TestPartitionRange(unittest.TestCase): + def _get_target_class(self): + from google.cloud.bigquery.table import PartitionRange + + return PartitionRange + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_constructor_defaults(self): + object_under_test = self._make_one() + assert object_under_test.start is None + assert object_under_test.end is None + assert object_under_test.interval is None + + def test_constructor_w_properties(self): + object_under_test = self._make_one(start=1, end=10, interval=2) + assert object_under_test.start == 1 + assert object_under_test.end == 10 + assert 
object_under_test.interval == 2 + + def test_constructor_w_resource(self): + object_under_test = self._make_one( + _properties={"start": -1234567890, "end": 1234567890, "interval": 1000000} + ) + assert object_under_test.start == -1234567890 + assert object_under_test.end == 1234567890 + assert object_under_test.interval == 1000000 + + def test_repr(self): + object_under_test = self._make_one(start=1, end=10, interval=2) + assert repr(object_under_test) == "PartitionRange(end=10, interval=2, start=1)" + + +class TestRangePartitioning(unittest.TestCase): + def _get_target_class(self): + from google.cloud.bigquery.table import RangePartitioning + + return RangePartitioning + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_constructor_defaults(self): + object_under_test = self._make_one() + assert object_under_test.field is None + assert object_under_test.range_.start is None + assert object_under_test.range_.end is None + assert object_under_test.range_.interval is None + + def test_constructor_w_properties(self): + from google.cloud.bigquery.table import PartitionRange + + object_under_test = self._make_one( + range_=PartitionRange(start=1, end=10, interval=2), field="integer_col" + ) + assert object_under_test.field == "integer_col" + assert object_under_test.range_.start == 1 + assert object_under_test.range_.end == 10 + assert object_under_test.range_.interval == 2 + + def test_constructor_w_resource(self): + object_under_test = self._make_one( + _properties={ + "field": "some_column", + "range": {"start": -1234567890, "end": 1234567890, "interval": 1000000}, + } + ) + assert object_under_test.field == "some_column" + assert object_under_test.range_.start == -1234567890 + assert object_under_test.range_.end == 1234567890 + assert object_under_test.range_.interval == 1000000 + + def test_range_w_wrong_type(self): + object_under_test = self._make_one() + with pytest.raises(ValueError, match="PartitionRange"): + 
object_under_test.range_ = object() + + def test_repr(self): + from google.cloud.bigquery.table import PartitionRange + + object_under_test = self._make_one( + range_=PartitionRange(start=1, end=10, interval=2), field="integer_col" + ) + assert ( + repr(object_under_test) + == "RangePartitioning(field='integer_col', range_=PartitionRange(end=10, interval=2, start=1))" + ) + + class TestTimePartitioning(unittest.TestCase): def _get_target_class(self): from google.cloud.bigquery.table import TimePartitioning @@ -2835,26 +2961,32 @@ def _make_one(self, *args, **kw): def test_constructor_defaults(self): time_partitioning = self._make_one() - self.assertEqual(time_partitioning.type_, "DAY") self.assertIsNone(time_partitioning.field) self.assertIsNone(time_partitioning.expiration_ms) - self.assertIsNone(time_partitioning.require_partition_filter) def test_constructor_explicit(self): from google.cloud.bigquery.table import TimePartitioningType time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, - field="name", - expiration_ms=10000, - require_partition_filter=True, + type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 ) self.assertEqual(time_partitioning.type_, "DAY") self.assertEqual(time_partitioning.field, "name") self.assertEqual(time_partitioning.expiration_ms, 10000) - self.assertTrue(time_partitioning.require_partition_filter) + + def test_require_partition_filter_warns_deprecation(self): + object_under_test = self._make_one() + + with warnings.catch_warnings(record=True) as warned: + assert object_under_test.require_partition_filter is None + object_under_test.require_partition_filter = True + assert object_under_test.require_partition_filter + + assert len(warned) == 3 + for warning in warned: + self.assertIs(warning.category, PendingDeprecationWarning) def test_from_api_repr_empty(self): klass = self._get_target_class() @@ -2868,7 +3000,6 @@ def test_from_api_repr_empty(self): self.assertIsNone(time_partitioning.type_) 
self.assertIsNone(time_partitioning.field) self.assertIsNone(time_partitioning.expiration_ms) - self.assertIsNone(time_partitioning.require_partition_filter) def test_from_api_repr_minimal(self): from google.cloud.bigquery.table import TimePartitioningType @@ -2880,7 +3011,6 @@ def test_from_api_repr_minimal(self): self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) self.assertIsNone(time_partitioning.field) self.assertIsNone(time_partitioning.expiration_ms) - self.assertIsNone(time_partitioning.require_partition_filter) def test_from_api_repr_doesnt_override_type(self): klass = self._get_target_class() @@ -2903,7 +3033,11 @@ def test_from_api_repr_explicit(self): self.assertEqual(time_partitioning.type_, TimePartitioningType.DAY) self.assertEqual(time_partitioning.field, "name") self.assertEqual(time_partitioning.expiration_ms, 10000) - self.assertTrue(time_partitioning.require_partition_filter) + + with warnings.catch_warnings(record=True) as warned: + self.assertTrue(time_partitioning.require_partition_filter) + + self.assertIs(warned[0].category, PendingDeprecationWarning) def test_to_api_repr_defaults(self): time_partitioning = self._make_one() @@ -2914,12 +3048,14 @@ def test_to_api_repr_explicit(self): from google.cloud.bigquery.table import TimePartitioningType time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, - field="name", - expiration_ms=10000, - require_partition_filter=True, + type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 ) + with warnings.catch_warnings(record=True) as warned: + time_partitioning.require_partition_filter = True + + self.assertIs(warned[0].category, PendingDeprecationWarning) + expected = { "type": "DAY", "field": "name", @@ -2950,21 +3086,21 @@ def test___eq___expiration_ms_mismatch(self): self.assertNotEqual(time_partitioning, other) def test___eq___require_partition_filter_mismatch(self): - time_partitioning = self._make_one( - field="foo", expiration_ms=100000, 
require_partition_filter=True - ) - other = self._make_one( - field="foo", expiration_ms=100000, require_partition_filter=False - ) + time_partitioning = self._make_one(field="foo", expiration_ms=100000) + other = self._make_one(field="foo", expiration_ms=100000) + with warnings.catch_warnings(record=True) as warned: + time_partitioning.require_partition_filter = True + other.require_partition_filter = False + + assert len(warned) == 2 + for warning in warned: + self.assertIs(warning.category, PendingDeprecationWarning) + self.assertNotEqual(time_partitioning, other) def test___eq___hit(self): - time_partitioning = self._make_one( - field="foo", expiration_ms=100000, require_partition_filter=True - ) - other = self._make_one( - field="foo", expiration_ms=100000, require_partition_filter=True - ) + time_partitioning = self._make_one(field="foo", expiration_ms=100000) + other = self._make_one(field="foo", expiration_ms=100000) self.assertEqual(time_partitioning, other) def test___ne___wrong_type(self): @@ -3008,18 +3144,9 @@ def test___repr___explicit(self): from google.cloud.bigquery.table import TimePartitioningType time_partitioning = self._make_one( - type_=TimePartitioningType.DAY, - field="name", - expiration_ms=10000, - require_partition_filter=True, - ) - expected = ( - "TimePartitioning(" - "expirationMs=10000," - "field=name," - "requirePartitionFilter=True," - "type=DAY)" + type_=TimePartitioningType.DAY, field="name", expiration_ms=10000 ) + expected = "TimePartitioning(" "expirationMs=10000," "field=name," "type=DAY)" self.assertEqual(repr(time_partitioning), expected) def test_set_expiration_w_none(self): diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py index 5c52eed8a43d..4466ec5b9c18 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py 
+++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py @@ -297,8 +297,8 @@ def get_data_source( >>> response = client.get_data_source(name) Args: - name (str): The field will contain name of the resource requested, for example: - ``projects/{project_id}/dataSources/{data_source_id}`` + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/dataSources/{data_source_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -381,8 +381,8 @@ def list_data_sources( ... pass Args: - parent (str): The BigQuery project id for which data sources should be returned. Must - be in the form: ``projects/{project_id}`` + parent (str): Required. The BigQuery project id for which data sources should be + returned. Must be in the form: ``projects/{project_id}`` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -478,12 +478,12 @@ def create_transfer_config( >>> response = client.create_transfer_config(parent, transfer_config) Args: - parent (str): The BigQuery project id where the transfer configuration should be - created. Must be in the format + parent (str): Required. The BigQuery project id where the transfer configuration + should be created. Must be in the format projects/{project\_id}/locations/{location\_id} If specified location and location of the destination bigquery dataset do not match - the request will fail. - transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Data transfer configuration to create. + transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` @@ -591,11 +591,11 @@ def update_transfer_config( >>> response = client.update_transfer_config(transfer_config, update_mask) Args: - transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Data transfer configuration to create. + transfer_config (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TransferConfig]): Required. Data transfer configuration to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.TransferConfig` - update_mask (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.FieldMask]): Required list of fields to be updated in this request. + update_mask (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.FieldMask]): Required. Required list of fields to be updated in this request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.FieldMask` @@ -696,8 +696,8 @@ def delete_transfer_config( >>> client.delete_transfer_config(name) Args: - name (str): The field will contain name of the resource requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -763,8 +763,8 @@ def get_transfer_config( >>> response = client.get_transfer_config(name) Args: - name (str): The field will contain name of the resource requested, for example: - ``projects/{project_id}/transferConfigs/{config_id}`` + name (str): Required. 
The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -847,8 +847,8 @@ def list_transfer_configs( ... pass Args: - parent (str): The BigQuery project id for which data sources should be returned: - ``projects/{project_id}``. + parent (str): Required. The BigQuery project id for which data sources should be + returned: ``projects/{project_id}``. data_source_ids (list[str]): When specified, only configurations of requested data sources are returned. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -950,14 +950,14 @@ def schedule_transfer_runs( >>> response = client.schedule_transfer_runs(parent, start_time, end_time) Args: - parent (str): Transfer configuration name in the form: + parent (str): Required. Transfer configuration name in the form: ``projects/{project_id}/transferConfigs/{config_id}``. - start_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Start time of the range of transfer runs. For example, + start_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Required. Start time of the range of transfer runs. For example, ``"2017-05-25T00:00:00+00:00"``. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_datatransfer_v1.types.Timestamp` - end_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): End time of the range of transfer runs. For example, + end_time (Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]): Required. End time of the range of transfer runs. For example, ``"2017-05-30T00:00:00+00:00"``. 
If a dict is provided, it must be of the same form as the protobuf @@ -1032,7 +1032,8 @@ def get_transfer_run( >>> response = client.get_transfer_run(name) Args: - name (str): The field will contain name of the resource requested, for example: + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -1102,7 +1103,8 @@ def delete_transfer_run( >>> client.delete_transfer_run(name) Args: - name (str): The field will contain name of the resource requested, for example: + name (str): Required. The field will contain name of the resource requested, for + example: ``projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -1184,8 +1186,8 @@ def list_transfer_runs( ... pass Args: - parent (str): Name of transfer configuration for which transfer runs should be - retrieved. Format of transfer configuration resource name is: + parent (str): Required. Name of transfer configuration for which transfer runs should + be retrieved. Format of transfer configuration resource name is: ``projects/{project_id}/transferConfigs/{config_id}``. states (list[~google.cloud.bigquery_datatransfer_v1.types.TransferState]): When specified, only transfer runs with requested states are returned. page_size (int): The maximum number of resources contained in the @@ -1292,7 +1294,7 @@ def list_transfer_logs( ... pass Args: - parent (str): Transfer run name in the form: + parent (str): Required. Transfer run name in the form: ``projects/{project_id}/transferConfigs/{config_Id}/runs/{run_id}``. page_size (int): The maximum number of resources contained in the underlying API response. 
If page streaming is performed per- @@ -1390,7 +1392,7 @@ def check_valid_creds( >>> response = client.check_valid_creds(name) Args: - name (str): The data source in the form: + name (str): Required. The data source in the form: ``projects/{project_id}/dataSources/{data_source_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto index e9a39683494c..a464645b5ac3 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer.proto @@ -18,13 +18,15 @@ syntax = "proto3"; package google.cloud.bigquery.datatransfer.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/bigquery/datatransfer/v1/transfer.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; -import "google/api/client.proto"; option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer"; @@ -50,6 +52,7 @@ service DataTransferService { get: "/v1/{name=projects/*/dataSources/*}" } }; + option (google.api.method_signature) = "name"; } // Lists supported data sources and returns their settings, @@ -61,6 +64,7 @@ service DataTransferService { get: "/v1/{parent=projects/*}/dataSources" } }; + option (google.api.method_signature) = "parent"; } // Creates a new data transfer configuration. 
@@ -73,6 +77,7 @@ service DataTransferService { body: "transfer_config" } }; + option (google.api.method_signature) = "parent,transfer_config"; } // Updates a data transfer configuration. @@ -86,6 +91,7 @@ service DataTransferService { body: "transfer_config" } }; + option (google.api.method_signature) = "transfer_config,update_mask"; } // Deletes a data transfer configuration, @@ -97,6 +103,7 @@ service DataTransferService { delete: "/v1/{name=projects/*/transferConfigs/*}" } }; + option (google.api.method_signature) = "name"; } // Returns information about a data transfer config. @@ -107,6 +114,7 @@ service DataTransferService { get: "/v1/{name=projects/*/transferConfigs/*}" } }; + option (google.api.method_signature) = "name"; } // Returns information about all data transfers in the project. @@ -117,6 +125,7 @@ service DataTransferService { get: "/v1/{parent=projects/*}/transferConfigs" } }; + option (google.api.method_signature) = "parent"; } // Creates transfer runs for a time range [start_time, end_time]. @@ -134,6 +143,7 @@ service DataTransferService { body: "*" } }; + option (google.api.method_signature) = "parent,start_time,end_time"; } // Start manual transfer runs to be executed now with schedule_time equal to @@ -159,6 +169,7 @@ service DataTransferService { get: "/v1/{name=projects/*/transferConfigs/*/runs/*}" } }; + option (google.api.method_signature) = "name"; } // Deletes the specified transfer run. @@ -169,6 +180,7 @@ service DataTransferService { delete: "/v1/{name=projects/*/transferConfigs/*/runs/*}" } }; + option (google.api.method_signature) = "name"; } // Returns information about running and completed jobs. @@ -179,6 +191,7 @@ service DataTransferService { get: "/v1/{parent=projects/*/transferConfigs/*}/runs" } }; + option (google.api.method_signature) = "parent"; } // Returns user facing log messages for the data transfer run. 
@@ -189,6 +202,7 @@ service DataTransferService { get: "/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs" } }; + option (google.api.method_signature) = "parent"; } // Returns true if valid credentials exist for the given data source and @@ -206,6 +220,7 @@ service DataTransferService { body: "*" } }; + option (google.api.method_signature) = "name"; } } @@ -295,6 +310,11 @@ message DataSourceParameter { // Represents data source metadata. Metadata is sufficient to // render UI and request proper OAuth tokens. message DataSource { + option (google.api.resource) = { + type: "bigquerydatatransfer.googleapis.com/DataSource" + pattern: "projects/{project}/dataSources/{data_source}" + }; + // The type of authorization needed for this data source. enum AuthorizationType { // Type unspecified. @@ -326,7 +346,7 @@ message DataSource { } // Output only. Data source resource name. - string name = 1; + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Data source id. string data_source_id = 2; @@ -396,16 +416,26 @@ message DataSource { // A request to get data source info. message GetDataSourceRequest { - // The field will contain name of the resource requested, for example: + // Required. The field will contain name of the resource requested, for example: // `projects/{project_id}/dataSources/{data_source_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/DataSource" + } + ]; } // Request to list supported data sources and their data transfer settings. message ListDataSourcesRequest { - // The BigQuery project id for which data sources should be returned. + // Required. The BigQuery project id for which data sources should be returned. 
// Must be in the form: `projects/{project_id}` - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // Pagination token, which can be used to request a specific page // of `ListDataSourcesRequest` list results. For multiple-page @@ -427,7 +457,7 @@ message ListDataSourcesResponse { // this token can be used as the // `ListDataSourcesRequest.page_token` // to request the next page of list results. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to create a data transfer configuration. If new credentials are @@ -437,14 +467,19 @@ message ListDataSourcesResponse { // authorization code. Otherwise, the transfer configuration will be associated // with the calling user. message CreateTransferConfigRequest { - // The BigQuery project id where the transfer configuration should be created. + // Required. The BigQuery project id where the transfer configuration should be created. // Must be in the format projects/{project_id}/locations/{location_id} // If specified location and location of the destination bigquery dataset // do not match - the request will fail. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; - // Data transfer configuration to create. - TransferConfig transfer_config = 2; + // Required. Data transfer configuration to create. + TransferConfig transfer_config = 2 [(google.api.field_behavior) = REQUIRED]; // Optional OAuth2 authorization code to use with this transfer configuration. // This is required if new credentials are needed, as indicated by @@ -476,8 +511,8 @@ message CreateTransferConfigRequest { // A request to update a transfer configuration. 
To update the user id of the // transfer configuration, an authorization code needs to be provided. message UpdateTransferConfigRequest { - // Data transfer configuration to create. - TransferConfig transfer_config = 1; + // Required. Data transfer configuration to create. + TransferConfig transfer_config = 1 [(google.api.field_behavior) = REQUIRED]; // Optional OAuth2 authorization code to use with this transfer configuration. // If it is provided, the transfer configuration will be associated with the @@ -497,8 +532,8 @@ message UpdateTransferConfigRequest { // the user to copy the code and paste it in the application. string authorization_code = 3; - // Required list of fields to be updated in this request. - google.protobuf.FieldMask update_mask = 4; + // Required. Required list of fields to be updated in this request. + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; // Optional version info. If users want to find a very recent access token, // that is, immediately after approving access, users have to set the @@ -511,38 +546,63 @@ message UpdateTransferConfigRequest { // A request to get data transfer information. message GetTransferConfigRequest { - // The field will contain name of the resource requested, for example: + // Required. The field will contain name of the resource requested, for example: // `projects/{project_id}/transferConfigs/{config_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + } + ]; } // A request to delete data transfer information. All associated transfer runs // and log messages will be deleted as well. message DeleteTransferConfigRequest { - // The field will contain name of the resource requested, for example: + // Required. 
The field will contain name of the resource requested, for example: // `projects/{project_id}/transferConfigs/{config_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + } + ]; } // A request to get data transfer run information. message GetTransferRunRequest { - // The field will contain name of the resource requested, for example: + // Required. The field will contain name of the resource requested, for example: // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferRun" + } + ]; } // A request to delete data transfer run information. message DeleteTransferRunRequest { - // The field will contain name of the resource requested, for example: + // Required. The field will contain name of the resource requested, for example: // `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferRun" + } + ]; } // A request to list data transfers configured for a BigQuery project. message ListTransferConfigsRequest { - // The BigQuery project id for which data sources + // Required. The BigQuery project id for which data sources // should be returned: `projects/{project_id}`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // When specified, only configurations of requested data sources are returned. repeated string data_source_ids = 2; @@ -561,13 +621,13 @@ message ListTransferConfigsRequest { // The returned list of pipelines in the project. 
message ListTransferConfigsResponse { // Output only. The stored pipeline transfer configurations. - repeated TransferConfig transfer_configs = 1; + repeated TransferConfig transfer_configs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The next-pagination token. For multiple-page list results, // this token can be used as the // `ListTransferConfigsRequest.page_token` // to request the next page of list results. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to list data transfer runs. UI can use this method to show/filter @@ -583,10 +643,15 @@ message ListTransferRunsRequest { LATEST = 1; } - // Name of transfer configuration for which transfer runs should be retrieved. + // Required. Name of transfer configuration for which transfer runs should be retrieved. // Format of transfer configuration resource name is: // `projects/{project_id}/transferConfigs/{config_id}`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + } + ]; // When specified, only transfer runs with requested states are returned. repeated TransferState states = 2; @@ -608,20 +673,25 @@ message ListTransferRunsRequest { // The returned list of pipelines in the project. message ListTransferRunsResponse { // Output only. The stored pipeline transfer runs. - repeated TransferRun transfer_runs = 1; + repeated TransferRun transfer_runs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The next-pagination token. For multiple-page list results, // this token can be used as the // `ListTransferRunsRequest.page_token` // to request the next page of list results. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to get user facing log messages associated with data transfer run. 
message ListTransferLogsRequest { - // Transfer run name in the form: + // Required. Transfer run name in the form: // `projects/{project_id}/transferConfigs/{config_Id}/runs/{run_id}`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferRun" + } + ]; // Pagination token, which can be used to request a specific page // of `ListTransferLogsRequest` list results. For multiple-page @@ -641,13 +711,13 @@ message ListTransferLogsRequest { // The returned list transfer run messages. message ListTransferLogsResponse { // Output only. The stored pipeline transfer messages. - repeated TransferMessage transfer_messages = 1; + repeated TransferMessage transfer_messages = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The next-pagination token. For multiple-page list results, // this token can be used as the // `GetTransferRunLogRequest.page_token` // to request the next page of list results. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to determine whether the user has valid credentials. This method @@ -657,9 +727,14 @@ message ListTransferLogsResponse { // returns false, as it cannot be determined whether the credentials are // already valid merely based on the user id. message CheckValidCredsRequest { - // The data source in the form: + // Required. The data source in the form: // `projects/{project_id}/dataSources/{data_source_id}` - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/DataSource" + } + ]; } // A response indicating whether the credentials exist and are valid. @@ -670,17 +745,22 @@ message CheckValidCredsResponse { // A request to schedule transfer runs for a time range. 
message ScheduleTransferRunsRequest { - // Transfer configuration name in the form: + // Required. Transfer configuration name in the form: // `projects/{project_id}/transferConfigs/{config_id}`. - string parent = 1; - - // Start time of the range of transfer runs. For example, + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + } + ]; + + // Required. Start time of the range of transfer runs. For example, // `"2017-05-25T00:00:00+00:00"`. - google.protobuf.Timestamp start_time = 2; + google.protobuf.Timestamp start_time = 2 [(google.api.field_behavior) = REQUIRED]; - // End time of the range of transfer runs. For example, + // Required. End time of the range of transfer runs. For example, // `"2017-05-30T00:00:00+00:00"`. - google.protobuf.Timestamp end_time = 3; + google.protobuf.Timestamp end_time = 3 [(google.api.field_behavior) = REQUIRED]; } // A response to schedule transfer runs for a time range. @@ -709,7 +789,9 @@ message StartManualTransferRunsRequest { // Transfer configuration name in the form: // `projects/{project_id}/transferConfigs/{config_id}`. - string parent = 1; + string parent = 1 [(google.api.resource_reference) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + }]; // The requested time specification - this can be a time range or a specific // run_time. 
diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py index 55338100718e..a70a55e3df79 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/datatransfer_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.bigquery_datatransfer_v1.proto import ( transfer_pb2 as google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2, ) @@ -24,7 +27,6 @@ from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -35,17 +37,19 @@ "\n)com.google.cloud.bigquery.datatransfer.v1B\021DataTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1" ), serialized_pb=_b( - '\n>google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/api/client.proto"\x85\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\xd7\x07\n\nDataSource\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 \x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c 
\x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"s\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02"$\n\x14GetDataSourceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x16ListDataSourcesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"{\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xaf\x01\n\x1b\x43reateTransferConfigRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12N\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x14\n\x0cversion_info\x18\x05 \x01(\t"\xd0\x01\n\x1bUpdateTransferConfigRequest\x12N\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x14\n\x0cversion_info\x18\x05 \x01(\t"(\n\x18GetTransferConfigRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"+\n\x1b\x44\x65leteTransferConfigRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"%\n\x15GetTransferRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"(\n\x18\x44\x65leteTransferRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"l\n\x1aListTransferConfigsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x87\x01\n\x1bListTransferConfigsResponse\x12O\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xad\x02\n\x17ListTransferRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"~\n\x18ListTransferRunsResponse\x12I\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\xaf\x01\n\x17ListTransferLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x86\x01\n\x18ListTransferLogsResponse\x12Q\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessage\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"&\n\x16\x43heckValidCredsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\x8b\x01\n\x1bScheduleTransferRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\nstart_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"\xce\x02\n\x1eStartManualTransferRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12o\n\x14requested_time_range\x18\x03 \x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\xa1\x1e\n\x13\x44\x61taTransferService\x12\xdf\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"^\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\x12\xf0\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"^\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\x12\x9d\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\x12\xbd\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa9\x01\x82\xd3\xe4\x93\x
02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\x12\xda\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"f\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\x12\xf3\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"f\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\x12\x84\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"f\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\x12\xb0\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\x8e\x01\x88\x02\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xf8\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"t\x82\xd3\xe4\x93\x02n\x12:/v1/{nam
e=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\xe2\x01\n\x11\x44\x65leteTransferRun\x12?.google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"t\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\x12\x89\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"t\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\x12\xa9\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\x12\x97\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x84\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xe3\x01\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' + 
'\n>google/cloud/bigquery/datatransfer_v1/proto/datatransfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto"\x85\x05\n\x13\x44\x61taSourceParameter\x12\x10\n\x08param_id\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12M\n\x04type\x18\x04 \x01(\x0e\x32?.google.cloud.bigquery.datatransfer.v1.DataSourceParameter.Type\x12\x10\n\x08required\x18\x05 \x01(\x08\x12\x10\n\x08repeated\x18\x06 \x01(\x08\x12\x18\n\x10validation_regex\x18\x07 \x01(\t\x12\x16\n\x0e\x61llowed_values\x18\x08 \x03(\t\x12/\n\tmin_value\x18\t \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12/\n\tmax_value\x18\n \x01(\x0b\x32\x1c.google.protobuf.DoubleValue\x12J\n\x06\x66ields\x18\x0b \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x1e\n\x16validation_description\x18\x0c \x01(\t\x12\x1b\n\x13validation_help_url\x18\r \x01(\t\x12\x11\n\timmutable\x18\x0e \x01(\x08\x12\x0f\n\x07recurse\x18\x0f \x01(\x08\x12\x12\n\ndeprecated\x18\x14 \x01(\x08"i\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\x0b\n\x07INTEGER\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\n\n\x06RECORD\x10\x05\x12\r\n\tPLUS_PAGE\x10\x06"\xbf\x08\n\nDataSource\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0e\x64\x61ta_source_id\x18\x02 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x11\n\tclient_id\x18\x05 \x01(\t\x12\x0e\n\x06scopes\x18\x06 \x03(\t\x12N\n\rtransfer_type\x18\x07 
\x01(\x0e\x32\x33.google.cloud.bigquery.datatransfer.v1.TransferTypeB\x02\x18\x01\x12\'\n\x1bsupports_multiple_transfers\x18\x08 \x01(\x08\x42\x02\x18\x01\x12\x1f\n\x17update_deadline_seconds\x18\t \x01(\x05\x12\x18\n\x10\x64\x65\x66\x61ult_schedule\x18\n \x01(\t\x12 \n\x18supports_custom_schedule\x18\x0b \x01(\x08\x12N\n\nparameters\x18\x0c \x03(\x0b\x32:.google.cloud.bigquery.datatransfer.v1.DataSourceParameter\x12\x10\n\x08help_url\x18\r \x01(\t\x12_\n\x12\x61uthorization_type\x18\x0e \x01(\x0e\x32\x43.google.cloud.bigquery.datatransfer.v1.DataSource.AuthorizationType\x12\\\n\x11\x64\x61ta_refresh_type\x18\x0f \x01(\x0e\x32\x41.google.cloud.bigquery.datatransfer.v1.DataSource.DataRefreshType\x12(\n default_data_refresh_window_days\x18\x10 \x01(\x05\x12\x1c\n\x14manual_runs_disabled\x18\x11 \x01(\x08\x12<\n\x19minimum_schedule_interval\x18\x12 \x01(\x0b\x32\x19.google.protobuf.Duration"s\n\x11\x41uthorizationType\x12"\n\x1e\x41UTHORIZATION_TYPE_UNSPECIFIED\x10\x00\x12\x16\n\x12\x41UTHORIZATION_CODE\x10\x01\x12"\n\x1eGOOGLE_PLUS_AUTHORIZATION_CODE\x10\x02"c\n\x0f\x44\x61taRefreshType\x12!\n\x1d\x44\x41TA_REFRESH_TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eSLIDING_WINDOW\x10\x01\x12\x19\n\x15\x43USTOM_SLIDING_WINDOW\x10\x02:a\xea\x41^\n.bigquerydatatransfer.googleapis.com/DataSource\x12,projects/{project}/dataSources/{data_source}"\\\n\x14GetDataSourceRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"\x84\x01\n\x16ListDataSourcesRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x80\x01\n\x17ListDataSourcesResponse\x12G\n\x0c\x64\x61ta_sources\x18\x01 \x03(\x0b\x32\x31.google.cloud.bigquery.datatransfer.v1.DataSource\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xdf\x01\n\x1b\x43reateTransferConfigRequest\x12\x39\n\x06parent\x18\x01 
\x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12S\n\x0ftransfer_config\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x14\n\x0cversion_info\x18\x05 \x01(\t"\xda\x01\n\x1bUpdateTransferConfigRequest\x12S\n\x0ftransfer_config\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x02\x12\x1a\n\x12\x61uthorization_code\x18\x03 \x01(\t\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x14\n\x0cversion_info\x18\x05 \x01(\t"d\n\x18GetTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"g\n\x1b\x44\x65leteTransferConfigRequest\x12H\n\x04name\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig"^\n\x15GetTransferRunRequest\x12\x45\n\x04name\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/bigquerydatatransfer.googleapis.com/TransferRun"a\n\x18\x44\x65leteTransferRunRequest\x12\x45\n\x04name\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/bigquerydatatransfer.googleapis.com/TransferRun"\xa1\x01\n\x1aListTransferConfigsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x17\n\x0f\x64\x61ta_source_ids\x18\x02 \x03(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"\x91\x01\n\x1bListTransferConfigsResponse\x12T\n\x10transfer_configs\x18\x01 \x03(\x0b\x32\x35.google.cloud.bigquery.datatransfer.v1.TransferConfigB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe9\x02\n\x17ListTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x44\n\x06states\x18\x02 \x03(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x12\n\npage_token\x18\x03 
\x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12^\n\x0brun_attempt\x18\x05 \x01(\x0e\x32I.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest.RunAttempt"5\n\nRunAttempt\x12\x1b\n\x17RUN_ATTEMPT_UNSPECIFIED\x10\x00\x12\n\n\x06LATEST\x10\x01"\x88\x01\n\x18ListTransferRunsResponse\x12N\n\rtransfer_runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRunB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xe8\x01\n\x17ListTransferLogsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xe0\x41\x02\xfa\x41\x31\n/bigquerydatatransfer.googleapis.com/TransferRun\x12\x12\n\npage_token\x18\x04 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12]\n\rmessage_types\x18\x06 \x03(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity"\x90\x01\n\x18ListTransferLogsResponse\x12V\n\x11transfer_messages\x18\x01 \x03(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.TransferMessageB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"^\n\x16\x43heckValidCredsRequest\x12\x44\n\x04name\x18\x01 \x01(\tB6\xe0\x41\x02\xfa\x41\x30\n.bigquerydatatransfer.googleapis.com/DataSource"2\n\x17\x43heckValidCredsResponse\x12\x17\n\x0fhas_valid_creds\x18\x01 \x01(\x08"\xd1\x01\n\x1bScheduleTransferRunsRequest\x12J\n\x06parent\x18\x01 \x01(\tB:\xe0\x41\x02\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x33\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x31\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"`\n\x1cScheduleTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"\x87\x03\n\x1eStartManualTransferRunsRequest\x12G\n\x06parent\x18\x01 \x01(\tB7\xfa\x41\x34\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12o\n\x14requested_time_range\x18\x03 
\x01(\x0b\x32O.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest.TimeRangeH\x00\x12\x38\n\x12requested_run_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x1ai\n\tTimeRange\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x06\n\x04time"c\n\x1fStartManualTransferRunsResponse\x12@\n\x04runs\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.datatransfer.v1.TransferRun2\xc3\x1f\n\x13\x44\x61taTransferService\x12\xe6\x01\n\rGetDataSource\x12;.google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest\x1a\x31.google.cloud.bigquery.datatransfer.v1.DataSource"e\x82\xd3\xe4\x93\x02X\x12//v1/{name=projects/*/locations/*/dataSources/*}Z%\x12#/v1/{name=projects/*/dataSources/*}\xda\x41\x04name\x12\xf9\x01\n\x0fListDataSources\x12=.google.cloud.bigquery.datatransfer.v1.ListDataSourcesRequest\x1a>.google.cloud.bigquery.datatransfer.v1.ListDataSourcesResponse"g\x82\xd3\xe4\x93\x02X\x12//v1/{parent=projects/*/locations/*}/dataSourcesZ%\x12#/v1/{parent=projects/*}/dataSources\xda\x41\x06parent\x12\xb6\x02\n\x14\x43reateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.CreateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xa2\x01\x82\xd3\xe4\x93\x02\x82\x01"3/v1/{parent=projects/*/locations/*}/transferConfigs:\x0ftransfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\x0ftransfer_config\xda\x41\x16parent,transfer_config\x12\xdb\x02\n\x14UpdateTransferConfig\x12\x42.google.cloud.bigquery.datatransfer.v1.UpdateTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"\xc7\x01\x82\xd3\xe4\x93\x02\xa2\x01\x32\x43/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\x0ftransfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\x0ftransfer_config\xda\x41\x1btransfer_config,update_mask\x12\xe1\x01\n\x14\x44\x65leteTransferConfig\x12\x42.google.cloud.bigquery
.datatransfer.v1.DeleteTransferConfigRequest\x1a\x16.google.protobuf.Empty"m\x82\xd3\xe4\x93\x02`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\xfa\x01\n\x11GetTransferConfig\x12?.google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest\x1a\x35.google.cloud.bigquery.datatransfer.v1.TransferConfig"m\x82\xd3\xe4\x93\x02`\x12\x33/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\x12\'/v1/{name=projects/*/transferConfigs/*}\xda\x41\x04name\x12\x8d\x02\n\x13ListTransferConfigs\x12\x41.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsRequest\x1a\x42.google.cloud.bigquery.datatransfer.v1.ListTransferConfigsResponse"o\x82\xd3\xe4\x93\x02`\x12\x33/v1/{parent=projects/*/locations/*}/transferConfigsZ)\x12\'/v1/{parent=projects/*}/transferConfigs\xda\x41\x06parent\x12\xcd\x02\n\x14ScheduleTransferRuns\x12\x42.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest\x1a\x43.google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsResponse"\xab\x01\x88\x02\x01\x82\xd3\xe4\x93\x02\x84\x01"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\x01*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\x01*\xda\x41\x1aparent,start_time,end_time\x12\xbc\x02\n\x17StartManualTransferRuns\x12\x45.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsRequest\x1a\x46.google.cloud.bigquery.datatransfer.v1.StartManualTransferRunsResponse"\x91\x01\x82\xd3\xe4\x93\x02\x8a\x01"E/v1/{parent=projects/*/locations/*/transferConfigs/*}:startManualRuns:\x01*Z>"9/v1/{parent=projects/*/transferConfigs/*}:startManualRuns:\x01*\x12\xff\x01\n\x0eGetTransferRun\x12<.google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest\x1a\x32.google.cloud.bigquery.datatransfer.v1.TransferRun"{\x82\xd3\xe4\x93\x02n\x12:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\x12./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\xe9\x01\n\x11\x44\x65leteTransferRun\x12?.goog
le.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest\x1a\x16.google.protobuf.Empty"{\x82\xd3\xe4\x93\x02n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\xda\x41\x04name\x12\x92\x02\n\x10ListTransferRuns\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferRunsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferRunsResponse"}\x82\xd3\xe4\x93\x02n\x12:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\x12./v1/{parent=projects/*/transferConfigs/*}/runs\xda\x41\x06parent\x12\xb2\x02\n\x10ListTransferLogs\x12>.google.cloud.bigquery.datatransfer.v1.ListTransferLogsRequest\x1a?.google.cloud.bigquery.datatransfer.v1.ListTransferLogsResponse"\x9c\x01\x82\xd3\xe4\x93\x02\x8c\x01\x12I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\x12=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\xda\x41\x06parent\x12\x9e\x02\n\x0f\x43heckValidCreds\x12=.google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest\x1a>.google.cloud.bigquery.datatransfer.v1.CheckValidCredsResponse"\x8b\x01\x82\xd3\xe4\x93\x02~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\x01*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\x01*\xda\x41\x04name\x1aW\xca\x41#bigquerydatatransfer.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xe3\x01\n)com.google.cloud.bigquery.datatransfer.v1B\x11\x44\x61taTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -84,8 +88,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=921, - serialized_end=1026, + serialized_start=981, + serialized_end=1086, ) _sym_db.RegisterEnumDescriptor(_DATASOURCEPARAMETER_TYPE) @@ -119,8 +123,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1796, - serialized_end=1911, + serialized_start=1861, + serialized_end=1976, ) _sym_db.RegisterEnumDescriptor(_DATASOURCE_AUTHORIZATIONTYPE) @@ -150,8 +154,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1913, - serialized_end=2012, + serialized_start=1978, + serialized_end=2077, ) _sym_db.RegisterEnumDescriptor(_DATASOURCE_DATAREFRESHTYPE) @@ -174,8 +178,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3312, - serialized_end=3365, + serialized_start=4008, + serialized_end=4061, ) _sym_db.RegisterEnumDescriptor(_LISTTRANSFERRUNSREQUEST_RUNATTEMPT) @@ -484,8 +488,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=381, - serialized_end=1026, + serialized_start=441, + serialized_end=1086, ) @@ -511,7 +515,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -824,13 +828,15 @@ extensions=[], nested_types=[], enum_types=[_DATASOURCE_AUTHORIZATIONTYPE, _DATASOURCE_DATAREFRESHTYPE], - serialized_options=None, + serialized_options=_b( + "\352A^\n.bigquerydatatransfer.googleapis.com/DataSource\022,projects/{project}/dataSources/{data_source}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1029, - serialized_end=2012, + 
serialized_start=1089, + serialized_end=2176, ) @@ -856,7 +862,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource" + ), file=DESCRIPTOR, ) ], @@ -868,8 +876,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2014, - serialized_end=2050, + serialized_start=2178, + serialized_end=2270, ) @@ -895,7 +903,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -943,8 +953,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2052, - serialized_end=2131, + serialized_start=2273, + serialized_end=2405, ) @@ -988,7 +998,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1000,8 +1010,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2133, - serialized_end=2256, + serialized_start=2408, + serialized_end=2536, ) @@ -1027,7 +1037,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1045,7 +1057,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1093,8 +1105,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2259, - serialized_end=2434, + serialized_start=2539, + serialized_end=2762, ) @@ -1120,7 +1132,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -1156,7 +1168,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1186,8 +1198,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2437, - serialized_end=2645, + serialized_start=2765, + serialized_end=2983, ) @@ -1213,7 +1225,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ) ], @@ -1225,8 +1239,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2647, - serialized_end=2687, + serialized_start=2985, + serialized_end=3085, ) @@ -1252,7 +1266,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ) ], @@ -1264,8 +1280,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2689, - serialized_end=2732, + serialized_start=3087, + serialized_end=3190, ) @@ -1291,7 +1307,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A1\n/bigquerydatatransfer.googleapis.com/TransferRun" + ), file=DESCRIPTOR, ) ], @@ -1303,8 +1321,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2734, - serialized_end=2771, + serialized_start=3192, + serialized_end=3286, ) @@ -1330,7 +1348,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A1\n/bigquerydatatransfer.googleapis.com/TransferRun" + ), file=DESCRIPTOR, ) ], @@ -1342,8 +1362,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2773, - serialized_end=2813, + serialized_start=3288, + 
serialized_end=3385, ) @@ -1369,7 +1389,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1435,8 +1457,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2815, - serialized_end=2923, + serialized_start=3388, + serialized_end=3549, ) @@ -1462,7 +1484,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1480,7 +1502,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1492,8 +1514,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2926, - serialized_end=3061, + serialized_start=3552, + serialized_end=3697, ) @@ -1519,7 +1541,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1603,8 +1627,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3064, - serialized_end=3365, + serialized_start=3700, + serialized_end=4061, ) @@ -1630,7 +1654,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1648,7 +1672,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1660,8 +1684,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3367, - serialized_end=3493, + serialized_start=4064, + serialized_end=4200, ) @@ -1687,7 +1711,9 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A1\n/bigquerydatatransfer.googleapis.com/TransferRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1753,8 +1779,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3496, - serialized_end=3671, + serialized_start=4203, + serialized_end=4435, ) @@ -1780,7 +1806,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1798,7 +1824,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1810,8 +1836,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3674, - serialized_end=3808, + serialized_start=4438, + serialized_end=4582, ) @@ -1837,7 +1863,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A0\n.bigquerydatatransfer.googleapis.com/DataSource" + ), file=DESCRIPTOR, ) ], @@ -1849,8 +1877,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3810, - serialized_end=3848, + serialized_start=4584, + serialized_end=4678, ) @@ -1888,8 +1916,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3850, - serialized_end=3900, + serialized_start=4680, + serialized_end=4730, ) @@ -1915,7 +1943,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1933,7 +1963,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1951,7 
+1981,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1963,8 +1993,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3903, - serialized_end=4042, + serialized_start=4733, + serialized_end=4942, ) @@ -2002,8 +2032,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4044, - serialized_end=4140, + serialized_start=4944, + serialized_end=5040, ) @@ -2059,8 +2089,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4364, - serialized_end=4469, + serialized_start=5321, + serialized_end=5426, ) _STARTMANUALTRANSFERRUNSREQUEST = _descriptor.Descriptor( @@ -2085,7 +2115,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\372A4\n2bigquerydatatransfer.googleapis.com/TransferConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2141,8 +2173,8 @@ fields=[], ) ], - serialized_start=4143, - serialized_end=4477, + serialized_start=5043, + serialized_end=5434, ) @@ -2180,8 +2212,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4479, - serialized_end=4578, + serialized_start=5436, + serialized_end=5535, ) _DATASOURCEPARAMETER.fields_by_name["type"].enum_type = _DATASOURCEPARAMETER_TYPE @@ -2473,8 +2505,8 @@ Attributes: name: - The field will contain name of the resource requested, for - example: + Required. The field will contain name of the resource + requested, for example: ``projects/{project_id}/dataSources/{data_source_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetDataSourceRequest) @@ -2493,8 +2525,9 @@ Attributes: parent: - The BigQuery project id for which data sources should be - returned. Must be in the form: ``projects/{project_id}`` + Required. The BigQuery project id for which data sources + should be returned. 
Must be in the form: + ``projects/{project_id}`` page_token: Pagination token, which can be used to request a specific page of ``ListDataSourcesRequest`` list results. For multiple-page @@ -2549,13 +2582,13 @@ Attributes: parent: - The BigQuery project id where the transfer configuration - should be created. Must be in the format + Required. The BigQuery project id where the transfer + configuration should be created. Must be in the format projects/{project\_id}/locations/{location\_id} If specified location and location of the destination bigquery dataset do not match - the request will fail. transfer_config: - Data transfer configuration to create. + Required. Data transfer configuration to create. authorization_code: Optional OAuth2 authorization code to use with this transfer configuration. This is required if new credentials are needed, @@ -2599,7 +2632,7 @@ Attributes: transfer_config: - Data transfer configuration to create. + Required. Data transfer configuration to create. authorization_code: Optional OAuth2 authorization code to use with this transfer configuration. If it is provided, the transfer configuration @@ -2618,7 +2651,8 @@ with the page text prompting the user to copy the code and paste it in the application. update_mask: - Required list of fields to be updated in this request. + Required. Required list of fields to be updated in this + request. version_info: Optional version info. If users want to find a very recent access token, that is, immediately after approving access, @@ -2644,8 +2678,9 @@ Attributes: name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}`` + Required. 
The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferConfigRequest) ), @@ -2664,8 +2699,9 @@ Attributes: name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}`` + Required. The field will contain name of the resource + requested, for example: + ``projects/{project_id}/transferConfigs/{config_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferConfigRequest) ), @@ -2683,9 +2719,9 @@ Attributes: name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}/r - uns/{run_id}`` + Required. The field will contain name of the resource + requested, for example: ``projects/{project_id}/transferConfig + s/{config_id}/runs/{run_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.GetTransferRunRequest) ), @@ -2703,9 +2739,9 @@ Attributes: name: - The field will contain name of the resource requested, for - example: ``projects/{project_id}/transferConfigs/{config_id}/r - uns/{run_id}`` + Required. The field will contain name of the resource + requested, for example: ``projects/{project_id}/transferConfig + s/{config_id}/runs/{run_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.DeleteTransferRunRequest) ), @@ -2723,8 +2759,8 @@ Attributes: parent: - The BigQuery project id for which data sources should be - returned: ``projects/{project_id}``. + Required. The BigQuery project id for which data sources + should be returned: ``projects/{project_id}``. data_source_ids: When specified, only configurations of requested data sources are returned. 
@@ -2779,9 +2815,10 @@ Attributes: parent: - Name of transfer configuration for which transfer runs should - be retrieved. Format of transfer configuration resource name - is: ``projects/{project_id}/transferConfigs/{config_id}``. + Required. Name of transfer configuration for which transfer + runs should be retrieved. Format of transfer configuration + resource name is: + ``projects/{project_id}/transferConfigs/{config_id}``. states: When specified, only transfer runs with requested states are returned. @@ -2837,8 +2874,8 @@ Attributes: parent: - Transfer run name in the form: ``projects/{project_id}/transfe - rConfigs/{config_Id}/runs/{run_id}``. + Required. Transfer run name in the form: ``projects/{project_i + d}/transferConfigs/{config_Id}/runs/{run_id}``. page_token: Pagination token, which can be used to request a specific page of ``ListTransferLogsRequest`` list results. For multiple-page @@ -2896,7 +2933,7 @@ Attributes: name: - The data source in the form: + Required. The data source in the form: ``projects/{project_id}/dataSources/{data_source_id}`` """, # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.CheckValidCredsRequest) @@ -2933,13 +2970,13 @@ Attributes: parent: - Transfer configuration name in the form: + Required. Transfer configuration name in the form: ``projects/{project_id}/transferConfigs/{config_id}``. start_time: - Start time of the range of transfer runs. For example, - ``"2017-05-25T00:00:00+00:00"``. + Required. Start time of the range of transfer runs. For + example, ``"2017-05-25T00:00:00+00:00"``. end_time: - End time of the range of transfer runs. For example, + Required. End time of the range of transfer runs. For example, ``"2017-05-30T00:00:00+00:00"``. 
""", # @@protoc_insertion_point(class_scope:google.cloud.bigquery.datatransfer.v1.ScheduleTransferRunsRequest) @@ -3039,8 +3076,35 @@ DESCRIPTOR._options = None +_DATASOURCE.fields_by_name["name"]._options = None _DATASOURCE.fields_by_name["transfer_type"]._options = None _DATASOURCE.fields_by_name["supports_multiple_transfers"]._options = None +_DATASOURCE._options = None +_GETDATASOURCEREQUEST.fields_by_name["name"]._options = None +_LISTDATASOURCESREQUEST.fields_by_name["parent"]._options = None +_LISTDATASOURCESRESPONSE.fields_by_name["next_page_token"]._options = None +_CREATETRANSFERCONFIGREQUEST.fields_by_name["parent"]._options = None +_CREATETRANSFERCONFIGREQUEST.fields_by_name["transfer_config"]._options = None +_UPDATETRANSFERCONFIGREQUEST.fields_by_name["transfer_config"]._options = None +_UPDATETRANSFERCONFIGREQUEST.fields_by_name["update_mask"]._options = None +_GETTRANSFERCONFIGREQUEST.fields_by_name["name"]._options = None +_DELETETRANSFERCONFIGREQUEST.fields_by_name["name"]._options = None +_GETTRANSFERRUNREQUEST.fields_by_name["name"]._options = None +_DELETETRANSFERRUNREQUEST.fields_by_name["name"]._options = None +_LISTTRANSFERCONFIGSREQUEST.fields_by_name["parent"]._options = None +_LISTTRANSFERCONFIGSRESPONSE.fields_by_name["transfer_configs"]._options = None +_LISTTRANSFERCONFIGSRESPONSE.fields_by_name["next_page_token"]._options = None +_LISTTRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None +_LISTTRANSFERRUNSRESPONSE.fields_by_name["transfer_runs"]._options = None +_LISTTRANSFERRUNSRESPONSE.fields_by_name["next_page_token"]._options = None +_LISTTRANSFERLOGSREQUEST.fields_by_name["parent"]._options = None +_LISTTRANSFERLOGSRESPONSE.fields_by_name["transfer_messages"]._options = None +_LISTTRANSFERLOGSRESPONSE.fields_by_name["next_page_token"]._options = None +_CHECKVALIDCREDSREQUEST.fields_by_name["name"]._options = None +_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None 
+_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["start_time"]._options = None +_SCHEDULETRANSFERRUNSREQUEST.fields_by_name["end_time"]._options = None +_STARTMANUALTRANSFERRUNSREQUEST.fields_by_name["parent"]._options = None _DATATRANSFERSERVICE = _descriptor.ServiceDescriptor( name="DataTransferService", @@ -3050,8 +3114,8 @@ serialized_options=_b( "\312A#bigquerydatatransfer.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=4581, - serialized_end=8454, + serialized_start=5538, + serialized_end=9573, methods=[ _descriptor.MethodDescriptor( name="GetDataSource", @@ -3061,7 +3125,7 @@ input_type=_GETDATASOURCEREQUEST, output_type=_DATASOURCE, serialized_options=_b( - "\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}" + "\202\323\344\223\002X\022//v1/{name=projects/*/locations/*/dataSources/*}Z%\022#/v1/{name=projects/*/dataSources/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3072,7 +3136,7 @@ input_type=_LISTDATASOURCESREQUEST, output_type=_LISTDATASOURCESRESPONSE, serialized_options=_b( - "\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources" + "\202\323\344\223\002X\022//v1/{parent=projects/*/locations/*}/dataSourcesZ%\022#/v1/{parent=projects/*}/dataSources\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3083,7 +3147,7 @@ input_type=_CREATETRANSFERCONFIGREQUEST, output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, serialized_options=_b( - '\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config' + '\202\323\344\223\002\202\001"3/v1/{parent=projects/*/locations/*}/transferConfigs:\017transfer_configZ:"\'/v1/{parent=projects/*}/transferConfigs:\017transfer_config\332A\026parent,transfer_config' ), ), 
_descriptor.MethodDescriptor( @@ -3094,7 +3158,7 @@ input_type=_UPDATETRANSFERCONFIGREQUEST, output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, serialized_options=_b( - "\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config" + "\202\323\344\223\002\242\0012C/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}:\017transfer_configZJ27/v1/{transfer_config.name=projects/*/transferConfigs/*}:\017transfer_config\332A\033transfer_config,update_mask" ), ), _descriptor.MethodDescriptor( @@ -3105,7 +3169,7 @@ input_type=_DELETETRANSFERCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}" + "\202\323\344\223\002`*3/v1/{name=projects/*/locations/*/transferConfigs/*}Z)*'/v1/{name=projects/*/transferConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3116,7 +3180,7 @@ input_type=_GETTRANSFERCONFIGREQUEST, output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERCONFIG, serialized_options=_b( - "\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}" + "\202\323\344\223\002`\0223/v1/{name=projects/*/locations/*/transferConfigs/*}Z)\022'/v1/{name=projects/*/transferConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3127,7 +3191,7 @@ input_type=_LISTTRANSFERCONFIGSREQUEST, output_type=_LISTTRANSFERCONFIGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs" + 
"\202\323\344\223\002`\0223/v1/{parent=projects/*/locations/*}/transferConfigsZ)\022'/v1/{parent=projects/*}/transferConfigs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3138,7 +3202,7 @@ input_type=_SCHEDULETRANSFERRUNSREQUEST, output_type=_SCHEDULETRANSFERRUNSRESPONSE, serialized_options=_b( - '\210\002\001\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*' + '\210\002\001\202\323\344\223\002\204\001"B/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns:\001*Z;"6/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns:\001*\332A\032parent,start_time,end_time' ), ), _descriptor.MethodDescriptor( @@ -3160,7 +3224,7 @@ input_type=_GETTRANSFERRUNREQUEST, output_type=google_dot_cloud_dot_bigquery_dot_datatransfer__v1_dot_proto_dot_transfer__pb2._TRANSFERRUN, serialized_options=_b( - "\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}" + "\202\323\344\223\002n\022:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0\022./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3171,7 +3235,7 @@ input_type=_DELETETRANSFERRUNREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}" + "\202\323\344\223\002n*:/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}Z0*./v1/{name=projects/*/transferConfigs/*/runs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3182,7 +3246,7 @@ input_type=_LISTTRANSFERRUNSREQUEST, output_type=_LISTTRANSFERRUNSRESPONSE, serialized_options=_b( - "\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs" + 
"\202\323\344\223\002n\022:/v1/{parent=projects/*/locations/*/transferConfigs/*}/runsZ0\022./v1/{parent=projects/*/transferConfigs/*}/runs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3193,7 +3257,7 @@ input_type=_LISTTRANSFERLOGSREQUEST, output_type=_LISTTRANSFERLOGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs" + "\202\323\344\223\002\214\001\022I/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogsZ?\022=/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3204,7 +3268,7 @@ input_type=_CHECKVALIDCREDSREQUEST, output_type=_CHECKVALIDCREDSRESPONSE, serialized_options=_b( - '\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*' + '\202\323\344\223\002~"?/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds:\001*Z8"3/v1/{name=projects/*/dataSources/*}:checkValidCreds:\001*\332A\004name' ), ), ], diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto index 9501ea3cc3b3..b0982d286865 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer.proto @@ -17,7 +17,8 @@ syntax = "proto3"; package google.cloud.bigquery.datatransfer.v1; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/protobuf/struct.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; @@ -96,6 +97,11 @@ message ScheduleOptions { // `destination_dataset_id` is created when needed and shared with 
the // appropriate data source service account. message TransferConfig { + option (google.api.resource) = { + type: "bigquerydatatransfer.googleapis.com/TransferConfig" + pattern: "projects/{project}/transferConfigs/{transfer_config}" + }; + // The resource name of the transfer config. // Transfer config names have the form of // `projects/{project_id}/locations/{region}/transferConfigs/{config_id}`. @@ -105,8 +111,11 @@ message TransferConfig { // required, will be generated for config_id. string name = 1; - // The BigQuery target dataset id. - string destination_dataset_id = 2; + // The desination of the transfer config. + oneof destination { + // The BigQuery target dataset id. + string destination_dataset_id = 2; + } // User specified display name for the data transfer. string display_name = 3; @@ -147,23 +156,28 @@ message TransferConfig { bool disabled = 13; // Output only. Data transfer modification time. Ignored by server on input. - google.protobuf.Timestamp update_time = 4; + google.protobuf.Timestamp update_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Next time when data transfer will run. - google.protobuf.Timestamp next_run_time = 8; + google.protobuf.Timestamp next_run_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. State of the most recently updated transfer run. - TransferState state = 10; + TransferState state = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Deprecated. Unique ID of the user on whose behalf transfer is done. int64 user_id = 11; // Output only. Region in which BigQuery dataset is located. - string dataset_region = 14; + string dataset_region = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Represents a data transfer run. 
message TransferRun { + option (google.api.resource) = { + type: "bigquerydatatransfer.googleapis.com/TransferRun" + pattern: "projects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}" + }; + // The resource name of the transfer run. // Transfer run names have the form // `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`. @@ -182,23 +196,26 @@ message TransferRun { // Output only. Time when transfer run was started. // Parameter ignored by server for input requests. - google.protobuf.Timestamp start_time = 4; + google.protobuf.Timestamp start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Time when transfer run ended. // Parameter ignored by server for input requests. - google.protobuf.Timestamp end_time = 5; + google.protobuf.Timestamp end_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Last time the data transfer run state was updated. - google.protobuf.Timestamp update_time = 6; + google.protobuf.Timestamp update_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Data transfer specific parameters. - google.protobuf.Struct params = 9; + google.protobuf.Struct params = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. The BigQuery target dataset id. - string destination_dataset_id = 2; + // Data transfer destination. + oneof destination { + // Output only. The BigQuery target dataset id. + string destination_dataset_id = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + } // Output only. Data source id. - string data_source_id = 7; + string data_source_id = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Data transfer run state. Ignored for input requests. TransferState state = 8; @@ -211,7 +228,7 @@ message TransferRun { // scheduled manually, this is empty. // NOTE: the system might choose to delay the schedule depending on the // current load, so `schedule_time` doesn't always match this. 
- string schedule = 12; + string schedule = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Represents a user facing message for a particular data transfer run. diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py index 437774a5f201..64927cc95851 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/proto/transfer_pb2.py @@ -16,7 +16,8 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 @@ -30,10 +31,11 @@ "\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\001ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\242\002\005GCBDT\252\002%Google.Cloud.BigQuery.DataTransfer.V1\312\002%Google\\Cloud\\BigQuery\\DataTransfer\\V1" ), serialized_pb=_b( - '\n:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xff\x03\n\x0eTransferConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16\x64\x65stination_dataset_id\x18\x02 
\x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rnext_run_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x16\n\x0e\x64\x61taset_region\x18\x0e \x01(\t"\xfe\x03\n\x0bTransferRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12.\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x1e\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\t\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x10\n\x08schedule\x18\x0c \x01(\t"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t"U\n\x0fMessageSeverity\x12 
\n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\xe7\x01\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' + '\n:google/cloud/bigquery/datatransfer_v1/proto/transfer.proto\x12%google.cloud.bigquery.datatransfer.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x90\x01\n\x0fScheduleOptions\x12\x1f\n\x17\x64isable_auto_scheduling\x18\x03 \x01(\x08\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x93\x05\n\x0eTransferConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12 \n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x16\n\x0e\x64\x61ta_source_id\x18\x05 \x01(\t\x12\'\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x10\n\x08schedule\x18\x07 \x01(\t\x12P\n\x10schedule_options\x18\x18 \x01(\x0b\x32\x36.google.cloud.bigquery.datatransfer.v1.ScheduleOptions\x12 \n\x18\x64\x61ta_refresh_window_days\x18\x0c \x01(\x05\x12\x10\n\x08\x64isabled\x18\r \x01(\x08\x12\x34\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rnext_run_time\x18\x08 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x05state\x18\n \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferStateB\x03\xe0\x41\x03\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x1b\n\x0e\x64\x61taset_region\x18\x0e \x01(\tB\x03\xe0\x41\x03:m\xea\x41j\n2bigquerydatatransfer.googleapis.com/TransferConfig\x12\x34projects/{project}/transferConfigs/{transfer_config}B\r\n\x0b\x64\x65stination"\xc0\x05\n\x0bTransferRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08run_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\x0c\x65rror_status\x18\x15 \x01(\x0b\x32\x12.google.rpc.Status\x12\x33\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12,\n\x06params\x18\t \x01(\x0b\x32\x17.google.protobuf.StructB\x03\xe0\x41\x03\x12%\n\x16\x64\x65stination_dataset_id\x18\x02 \x01(\tB\x03\xe0\x41\x03H\x00\x12\x1b\n\x0e\x64\x61ta_source_id\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x05state\x18\x08 \x01(\x0e\x32\x34.google.cloud.bigquery.datatransfer.v1.TransferState\x12\x0f\n\x07user_id\x18\x0b \x01(\x03\x12\x15\n\x08schedule\x18\x0c \x01(\tB\x03\xe0\x41\x03:\x8b\x01\xea\x41\x87\x01\n/bigquerydatatransfer.googleapis.com/TransferRun\x12Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}B\r\n\x0b\x64\x65stination"\x8a\x02\n\x0fTransferMessage\x12\x30\n\x0cmessage_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12X\n\x08severity\x18\x02 \x01(\x0e\x32\x46.google.cloud.bigquery.datatransfer.v1.TransferMessage.MessageSeverity\x12\x14\n\x0cmessage_text\x18\x03 \x01(\t"U\n\x0fMessageSeverity\x12 
\n\x1cMESSAGE_SEVERITY_UNSPECIFIED\x10\x00\x12\x08\n\x04INFO\x10\x01\x12\x0b\n\x07WARNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03*K\n\x0cTransferType\x12\x1d\n\x19TRANSFER_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41TCH\x10\x01\x12\r\n\tSTREAMING\x10\x02\x1a\x02\x18\x01*s\n\rTransferState\x12\x1e\n\x1aTRANSFER_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tSUCCEEDED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\r\n\tCANCELLED\x10\x06\x42\xe7\x01\n)com.google.cloud.bigquery.datatransfer.v1B\rTransferProtoP\x01ZQgoogle.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer\xa2\x02\x05GCBDT\xaa\x02%Google.Cloud.BigQuery.DataTransfer.V1\xca\x02%Google\\Cloud\\BigQuery\\DataTransfer\\V1b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, @@ -62,8 +64,8 @@ ], containing_type=None, serialized_options=_b("\030\001"), - serialized_start=1662, - serialized_end=1737, + serialized_start=2034, + serialized_end=2109, ) _sym_db.RegisterEnumDescriptor(_TRANSFERTYPE) @@ -99,8 +101,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1739, - serialized_end=1854, + serialized_start=2111, + serialized_end=2226, ) _sym_db.RegisterEnumDescriptor(_TRANSFERSTATE) @@ -141,8 +143,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1575, - serialized_end=1660, + serialized_start=1947, + serialized_end=2032, ) _sym_db.RegisterEnumDescriptor(_TRANSFERMESSAGE_MESSAGESEVERITY) @@ -217,8 +219,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=220, - serialized_end=364, + serialized_start=250, + serialized_end=394, ) @@ -406,7 +408,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -424,7 +426,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -442,7 +444,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -478,20 +480,30 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Aj\n2bigquerydatatransfer.googleapis.com/TransferConfig\0224projects/{project}/transferConfigs/{transfer_config}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], - oneofs=[], - serialized_start=367, - serialized_end=878, + oneofs=[ + _descriptor.OneofDescriptor( + name="destination", + full_name="google.cloud.bigquery.datatransfer.v1.TransferConfig.destination", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=397, + serialized_end=1056, ) @@ -589,7 +601,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -607,7 +619,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -625,7 +637,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -643,7 +655,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -661,7 +673,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -679,7 +691,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -733,20 +745,30 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\207\001\n/bigquerydatatransfer.googleapis.com/TransferRun\022Tprojects/{project}/locations/{location}/transferConfigs/{transfer_config}/runs/{run}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], - oneofs=[], - serialized_start=881, - serialized_end=1391, + oneofs=[ + _descriptor.OneofDescriptor( + name="destination", + full_name="google.cloud.bigquery.datatransfer.v1.TransferRun.destination", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1059, + serialized_end=1763, ) @@ -820,8 +842,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1394, - serialized_end=1660, + serialized_start=1766, + serialized_end=2032, ) _SCHEDULEOPTIONS.fields_by_name[ @@ -841,6 +863,12 @@ "next_run_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _TRANSFERCONFIG.fields_by_name["state"].enum_type = _TRANSFERSTATE +_TRANSFERCONFIG.oneofs_by_name["destination"].fields.append( + _TRANSFERCONFIG.fields_by_name["destination_dataset_id"] +) +_TRANSFERCONFIG.fields_by_name[ + "destination_dataset_id" +].containing_oneof = _TRANSFERCONFIG.oneofs_by_name["destination"] _TRANSFERRUN.fields_by_name[ "schedule_time" ].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -863,6 +891,12 @@ "params" ].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT _TRANSFERRUN.fields_by_name["state"].enum_type = _TRANSFERSTATE +_TRANSFERRUN.oneofs_by_name["destination"].fields.append( + _TRANSFERRUN.fields_by_name["destination_dataset_id"] +) +_TRANSFERRUN.fields_by_name[ + "destination_dataset_id" +].containing_oneof = _TRANSFERRUN.oneofs_by_name["destination"] _TRANSFERMESSAGE.fields_by_name[ "message_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -934,6 +968,8 @@ If config\_id is not provided, usually a uuid, even though it is not guaranteed or required, will be generated for config\_id. + destination: + The desination of the transfer config. destination_dataset_id: The BigQuery target dataset id. display_name: @@ -1017,6 +1053,8 @@ updated. params: Output only. Data transfer specific parameters. + destination: + Data transfer destination. destination_dataset_id: Output only. The BigQuery target dataset id. 
data_source_id: @@ -1063,4 +1101,17 @@ DESCRIPTOR._options = None _TRANSFERTYPE._options = None +_TRANSFERCONFIG.fields_by_name["update_time"]._options = None +_TRANSFERCONFIG.fields_by_name["next_run_time"]._options = None +_TRANSFERCONFIG.fields_by_name["state"]._options = None +_TRANSFERCONFIG.fields_by_name["dataset_region"]._options = None +_TRANSFERCONFIG._options = None +_TRANSFERRUN.fields_by_name["start_time"]._options = None +_TRANSFERRUN.fields_by_name["end_time"]._options = None +_TRANSFERRUN.fields_by_name["update_time"]._options = None +_TRANSFERRUN.fields_by_name["params"]._options = None +_TRANSFERRUN.fields_by_name["destination_dataset_id"]._options = None +_TRANSFERRUN.fields_by_name["data_source_id"]._options = None +_TRANSFERRUN.fields_by_name["schedule"]._options = None +_TRANSFERRUN._options = None # @@protoc_insertion_point(module_scope) diff --git a/bigquery_datatransfer/synth.metadata b/bigquery_datatransfer/synth.metadata index 1643741ae8a9..8ab46cb47b0b 100644 --- a/bigquery_datatransfer/synth.metadata +++ b/bigquery_datatransfer/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-03T12:11:16.935211Z", + "updateTime": "2019-09-27T12:12:10.005793Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.37.1", + "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7b212a8d2319cd81a7b6942c25dbf4550480a06c", - "internalRef": "261339454" + "sha": "cd112d8d255e0099df053643d4bd12c228ef7b1b", + "internalRef": "271468707" } }, { diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py index 
e44e74830031..95e08647313f 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/big_query_storage_client.py @@ -241,18 +241,18 @@ def create_read_session( parent (str): Required. String of the form ``projects/{project_id}`` indicating the project this ReadSession is associated with. This is the project that will be billed for usage. - table_modifiers (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableModifiers]): Optional. Any modifiers to the Table (e.g. snapshot timestamp). + table_modifiers (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableModifiers]): Any modifiers to the Table (e.g. snapshot timestamp). If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_storage_v1beta1.types.TableModifiers` - requested_streams (int): Optional. Initial number of streams. If unset or 0, we will + requested_streams (int): Initial number of streams. If unset or 0, we will provide a value of streams so as to produce reasonable throughput. Must be non-negative. The number of streams may be lower than the requested number, depending on the amount parallelism that is reasonable for the table and the maximum amount of parallelism allowed by the system. Streams must be read starting from offset 0. - read_options (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableReadOptions]): Optional. Read options for this session (e.g. column selection, filters). + read_options (Union[dict, ~google.cloud.bigquery_storage_v1beta1.types.TableReadOptions]): Read options for this session (e.g. column selection, filters). 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.bigquery_storage_v1beta1.types.TableReadOptions` diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py index bf3b8b6f5b24..d799b3c21edf 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/gapic/transports/big_query_storage_grpc_transport.py @@ -33,6 +33,7 @@ class BigQueryStorageGrpcTransport(object): # in this service. _OAUTH_SCOPES = ( "https://www.googleapis.com/auth/bigquery", + "https://www.googleapis.com/auth/bigquery.readonly", "https://www.googleapis.com/auth/cloud-platform", ) diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto index fa2de616d4f7..22f742fbb654 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage.proto @@ -18,13 +18,15 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/bigquery/storage/v1beta1/arrow.proto"; import "google/cloud/bigquery/storage/v1beta1/avro.proto"; import "google/cloud/bigquery/storage/v1beta1/read_options.proto"; import "google/cloud/bigquery/storage/v1beta1/table_reference.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/timestamp.proto"; -import "google/api/client.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; option java_package = 
"com.google.cloud.bigquery.storage.v1beta1"; @@ -36,6 +38,7 @@ service BigQueryStorage { option (google.api.default_host) = "bigquerystorage.googleapis.com"; option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/bigquery," + "https://www.googleapis.com/auth/bigquery.readonly," "https://www.googleapis.com/auth/cloud-platform"; // Creates a new read session. A read session divides the contents of a @@ -59,6 +62,7 @@ service BigQueryStorage { body: "*" } }; + option (google.api.method_signature) = "table_reference,parent,requested_streams"; } // Reads rows from the table in the format prescribed by the read session. @@ -74,6 +78,7 @@ service BigQueryStorage { option (google.api.http) = { get: "/v1beta1/{read_position.stream.name=projects/*/streams/*}" }; + option (google.api.method_signature) = "read_position"; } // Creates additional streams for a ReadSession. This API can be used to @@ -84,6 +89,7 @@ service BigQueryStorage { post: "/v1beta1/{session.name=projects/*/sessions/*}" body: "*" }; + option (google.api.method_signature) = "session,requested_streams"; } // Triggers the graceful termination of a single stream in a ReadSession. This @@ -105,6 +111,7 @@ service BigQueryStorage { post: "/v1beta1/{stream.name=projects/*/streams/*}" body: "*" }; + option (google.api.method_signature) = "stream"; } // Splits a given read stream into two Streams. These streams are referred to @@ -124,11 +131,17 @@ service BigQueryStorage { option (google.api.http) = { get: "/v1beta1/{original_stream.name=projects/*/streams/*}" }; + option (google.api.method_signature) = "original_stream"; } } // Information about a single data stream within a read session. message Stream { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/Stream" + pattern: "projects/{project}/locations/{location}/streams/{stream}" + }; + // Name of the stream, in the form // `projects/{project_id}/locations/{location}/streams/{stream_id}`. 
string name = 1; @@ -145,6 +158,11 @@ message StreamPosition { // Information returned from a `CreateReadSession` request. message ReadSession { + option (google.api.resource) = { + type: "bigquerystorage.googleapis.com/ReadSession" + pattern: "projects/{project}/locations/{location}/sessions/{session}" + }; + // Unique identifier for the session, in the form // `projects/{project_id}/locations/{location}/sessions/{session_id}`. string name = 1; @@ -181,17 +199,17 @@ message ReadSession { // requested parallelism, projection filters and constraints. message CreateReadSessionRequest { // Required. Reference to the table to read. - TableReference table_reference = 1; + TableReference table_reference = 1 [(google.api.field_behavior) = REQUIRED]; // Required. String of the form `projects/{project_id}` indicating the // project this ReadSession is associated with. This is the project that will // be billed for usage. - string parent = 6; + string parent = 6 [(google.api.field_behavior) = REQUIRED]; - // Optional. Any modifiers to the Table (e.g. snapshot timestamp). + // Any modifiers to the Table (e.g. snapshot timestamp). TableModifiers table_modifiers = 2; - // Optional. Initial number of streams. If unset or 0, we will + // Initial number of streams. If unset or 0, we will // provide a value of streams so as to produce reasonable throughput. Must be // non-negative. The number of streams may be lower than the requested number, // depending on the amount parallelism that is reasonable for the table and @@ -200,7 +218,7 @@ message CreateReadSessionRequest { // Streams must be read starting from offset 0. int32 requested_streams = 3; - // Optional. Read options for this session (e.g. column selection, filters). + // Read options for this session (e.g. column selection, filters). TableReadOptions read_options = 4; // Data output format. Currently default to Avro. @@ -248,7 +266,7 @@ message ReadRowsRequest { // Required. 
Identifier of the position in the stream to start reading from. // The offset requested must be less than the last row read from ReadRows. // Requesting a larger offset is undefined. - StreamPosition read_position = 1; + StreamPosition read_position = 1 [(google.api.field_behavior) = REQUIRED]; } // Progress information for a given Stream. @@ -267,6 +285,12 @@ message StreamStatus { // sharding strategy. float fraction_consumed = 2; + // Represents the progress of the current stream. + // + // Note: This value is under development and should not be used. Use + // `fraction_consumed` instead. + Progress progress = 4; + // Whether this stream can be split. For sessions that use the LIQUID sharding // strategy, this value is always false. For BALANCED sessions, this value is // false when enough data have been read such that no more splits are possible @@ -275,6 +299,25 @@ message StreamStatus { bool is_splittable = 3; } +message Progress { + // The fraction of rows assigned to the stream that have been processed by the + // server so far, not including the rows in the current response message. + // + // This value, along with `at_response_end`, can be used to interpolate the + // progress made as the rows in the message are being processed using the + // following formula: `at_response_start + (at_response_end - + // at_response_start) * rows_processed_from_response / rows_in_response`. + // + // Note that if a filter is provided, the `at_response_end` value of the + // previous response may not necessarily be equal to the `at_response_start` + // value of the current response. + float at_response_start = 1; + + // Similar to `at_response_start`, except that this value includes the rows in + // the current response. + float at_response_end = 2; +} + // Information on if the current connection is being throttled. message ThrottleStatus { // How much this connection is being throttled. 
@@ -313,12 +356,12 @@ message ReadRowsResponse { message BatchCreateReadSessionStreamsRequest { // Required. Must be a non-expired session obtained from a call to // CreateReadSession. Only the name field needs to be set. - ReadSession session = 1; + ReadSession session = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Number of new streams requested. Must be positive. // Number of added streams may be less than this, see CreateReadSessionRequest // for more information. - int32 requested_streams = 2; + int32 requested_streams = 2 [(google.api.field_behavior) = REQUIRED]; } // The response from `BatchCreateReadSessionStreams` returns the stream diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py index ec546f3f9e81..500d277c6cf5 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py @@ -17,6 +17,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.bigquery_storage_v1beta1.proto import ( arrow_pb2 as google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_arrow__pb2, ) @@ -31,7 +34,6 @@ ) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -42,17 +44,19 @@ "\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" ), serialized_pb=_b( - 
'\n9google/cloud/bigquery/storage_v1beta1/proto/storage.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x37google/cloud/bigquery/storage_v1beta1/proto/arrow.proto\x1a\x36google/cloud/bigquery/storage_v1beta1/proto/avro.proto\x1a>google/cloud/bigquery/storage_v1beta1/proto/read_options.proto\x1a\x41google/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/api/client.proto"\x16\n\x06Stream\x12\x0c\n\x04name\x18\x01 \x01(\t"_\n\x0eStreamPosition\x12=\n\x06stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x0e\n\x06offset\x18\x02 \x01(\x03"\xa0\x04\n\x0bReadSession\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12H\n\x0b\x61vro_schema\x18\x05 \x01(\x0b\x32\x31.google.cloud.bigquery.storage.v1beta1.AvroSchemaH\x00\x12J\n\x0c\x61rrow_schema\x18\x06 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ArrowSchemaH\x00\x12>\n\x07streams\x18\x04 \x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12N\n\x0ftable_reference\x18\x07 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReference\x12N\n\x0ftable_modifiers\x18\x08 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12R\n\x11sharding_strategy\x18\t \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategyB\x08\n\x06schema"\xcb\x03\n\x18\x43reateReadSessionRequest\x12N\n\x0ftable_reference\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReference\x12\x0e\n\x06parent\x18\x06 \x01(\t\x12N\n\x0ftable_modifiers\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12\x19\n\x11requested_streams\x18\x03 \x01(\x05\x12M\n\x0cread_options\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.TableReadOptions\x12\x41\n\x06\x66ormat\x18\x05 
\x01(\x0e\x32\x31.google.cloud.bigquery.storage.v1beta1.DataFormat\x12R\n\x11sharding_strategy\x18\x07 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategy"_\n\x0fReadRowsRequest\x12L\n\rread_position\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.StreamPosition"]\n\x0cStreamStatus\x12\x1b\n\x13\x65stimated_row_count\x18\x01 \x01(\x03\x12\x19\n\x11\x66raction_consumed\x18\x02 \x01(\x02\x12\x15\n\ris_splittable\x18\x03 \x01(\x08"*\n\x0eThrottleStatus\x12\x18\n\x10throttle_percent\x18\x01 \x01(\x05"\xdf\x02\n\x10ReadRowsResponse\x12\x44\n\tavro_rows\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1beta1.AvroRowsH\x00\x12U\n\x12\x61rrow_record_batch\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatchH\x00\x12\x11\n\trow_count\x18\x06 \x01(\x03\x12\x43\n\x06status\x18\x02 \x01(\x0b\x32\x33.google.cloud.bigquery.storage.v1beta1.StreamStatus\x12N\n\x0fthrottle_status\x18\x05 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.ThrottleStatusB\x06\n\x04rows"\x86\x01\n$BatchCreateReadSessionStreamsRequest\x12\x43\n\x07session\x18\x01 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ReadSession\x12\x19\n\x11requested_streams\x18\x02 \x01(\x05"g\n%BatchCreateReadSessionStreamsResponse\x12>\n\x07streams\x18\x01 \x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"V\n\x15\x46inalizeStreamRequest\x12=\n\x06stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"r\n\x16SplitReadStreamRequest\x12\x46\n\x0foriginal_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x10\n\x08\x66raction\x18\x02 \x01(\x02"\xa9\x01\n\x17SplitReadStreamResponse\x12\x45\n\x0eprimary_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12G\n\x10remainder_stream\x18\x02 
\x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream*>\n\nDataFormat\x12\x1b\n\x17\x44\x41TA_FORMAT_UNSPECIFIED\x10\x00\x12\x08\n\x04\x41VRO\x10\x01\x12\t\n\x05\x41RROW\x10\x03*O\n\x10ShardingStrategy\x12!\n\x1dSHARDING_STRATEGY_UNSPECIFIED\x10\x00\x12\n\n\x06LIQUID\x10\x01\x12\x0c\n\x08\x42\x41LANCED\x10\x02\x32\xc4\t\n\x0f\x42igQueryStorage\x12\x87\x02\n\x11\x43reateReadSession\x12?.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest\x1a\x32.google.cloud.bigquery.storage.v1beta1.ReadSession"}\x82\xd3\xe4\x93\x02w"0/v1beta1/{table_reference.project_id=projects/*}:\x01*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\x01*\x12\xc0\x01\n\x08ReadRows\x12\x36.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest\x1a\x37.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse"A\x82\xd3\xe4\x93\x02;\x12\x39/v1beta1/{read_position.stream.name=projects/*/streams/*}0\x01\x12\xf4\x01\n\x1d\x42\x61tchCreateReadSessionStreams\x12K.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest\x1aL.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse"8\x82\xd3\xe4\x93\x02\x32"-/v1beta1/{session.name=projects/*/sessions/*}:\x01*\x12\x9e\x01\n\x0e\x46inalizeStream\x12<.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest\x1a\x16.google.protobuf.Empty"6\x82\xd3\xe4\x93\x02\x30"+/v1beta1/{stream.name=projects/*/streams/*}:\x01*\x12\xce\x01\n\x0fSplitReadStream\x12=.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest\x1a>.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse"<\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta1/{original_stream.name=projects/*/streams/*}\x1a{\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41Whttps://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platformBy\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' + 
'\n9google/cloud/bigquery/storage_v1beta1/proto/storage.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x37google/cloud/bigquery/storage_v1beta1/proto/arrow.proto\x1a\x36google/cloud/bigquery/storage_v1beta1/proto/avro.proto\x1a>google/cloud/bigquery/storage_v1beta1/proto/read_options.proto\x1a\x41google/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"|\n\x06Stream\x12\x0c\n\x04name\x18\x01 \x01(\t:d\xea\x41\x61\n%bigquerystorage.googleapis.com/Stream\x12\x38projects/{project}/locations/{location}/streams/{stream}"_\n\x0eStreamPosition\x12=\n\x06stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x0e\n\x06offset\x18\x02 \x01(\x03"\x8d\x05\n\x0bReadSession\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12H\n\x0b\x61vro_schema\x18\x05 \x01(\x0b\x32\x31.google.cloud.bigquery.storage.v1beta1.AvroSchemaH\x00\x12J\n\x0c\x61rrow_schema\x18\x06 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ArrowSchemaH\x00\x12>\n\x07streams\x18\x04 \x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12N\n\x0ftable_reference\x18\x07 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReference\x12N\n\x0ftable_modifiers\x18\x08 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12R\n\x11sharding_strategy\x18\t \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategy:k\xea\x41h\n*bigquerystorage.googleapis.com/ReadSession\x12:projects/{project}/locations/{location}/sessions/{session}B\x08\n\x06schema"\xd5\x03\n\x18\x43reateReadSessionRequest\x12S\n\x0ftable_reference\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableReferenceB\x03\xe0\x41\x02\x12\x13\n\x06parent\x18\x06 
\x01(\tB\x03\xe0\x41\x02\x12N\n\x0ftable_modifiers\x18\x02 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.TableModifiers\x12\x19\n\x11requested_streams\x18\x03 \x01(\x05\x12M\n\x0cread_options\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.TableReadOptions\x12\x41\n\x06\x66ormat\x18\x05 \x01(\x0e\x32\x31.google.cloud.bigquery.storage.v1beta1.DataFormat\x12R\n\x11sharding_strategy\x18\x07 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1beta1.ShardingStrategy"d\n\x0fReadRowsRequest\x12Q\n\rread_position\x18\x01 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.StreamPositionB\x03\xe0\x41\x02"\xa0\x01\n\x0cStreamStatus\x12\x1b\n\x13\x65stimated_row_count\x18\x01 \x01(\x03\x12\x19\n\x11\x66raction_consumed\x18\x02 \x01(\x02\x12\x41\n\x08progress\x18\x04 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1beta1.Progress\x12\x15\n\ris_splittable\x18\x03 \x01(\x08">\n\x08Progress\x12\x19\n\x11\x61t_response_start\x18\x01 \x01(\x02\x12\x17\n\x0f\x61t_response_end\x18\x02 \x01(\x02"*\n\x0eThrottleStatus\x12\x18\n\x10throttle_percent\x18\x01 \x01(\x05"\xdf\x02\n\x10ReadRowsResponse\x12\x44\n\tavro_rows\x18\x03 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1beta1.AvroRowsH\x00\x12U\n\x12\x61rrow_record_batch\x18\x04 \x01(\x0b\x32\x37.google.cloud.bigquery.storage.v1beta1.ArrowRecordBatchH\x00\x12\x11\n\trow_count\x18\x06 \x01(\x03\x12\x43\n\x06status\x18\x02 \x01(\x0b\x32\x33.google.cloud.bigquery.storage.v1beta1.StreamStatus\x12N\n\x0fthrottle_status\x18\x05 \x01(\x0b\x32\x35.google.cloud.bigquery.storage.v1beta1.ThrottleStatusB\x06\n\x04rows"\x90\x01\n$BatchCreateReadSessionStreamsRequest\x12H\n\x07session\x18\x01 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1beta1.ReadSessionB\x03\xe0\x41\x02\x12\x1e\n\x11requested_streams\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02"g\n%BatchCreateReadSessionStreamsResponse\x12>\n\x07streams\x18\x01 
\x03(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"V\n\x15\x46inalizeStreamRequest\x12=\n\x06stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream"r\n\x16SplitReadStreamRequest\x12\x46\n\x0foriginal_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12\x10\n\x08\x66raction\x18\x02 \x01(\x02"\xa9\x01\n\x17SplitReadStreamResponse\x12\x45\n\x0eprimary_stream\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream\x12G\n\x10remainder_stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1beta1.Stream*>\n\nDataFormat\x12\x1b\n\x17\x44\x41TA_FORMAT_UNSPECIFIED\x10\x00\x12\x08\n\x04\x41VRO\x10\x01\x12\t\n\x05\x41RROW\x10\x03*O\n\x10ShardingStrategy\x12!\n\x1dSHARDING_STRATEGY_UNSPECIFIED\x10\x00\x12\n\n\x06LIQUID\x10\x01\x12\x0c\n\x08\x42\x41LANCED\x10\x02\x32\xeb\n\n\x0f\x42igQueryStorage\x12\xb3\x02\n\x11\x43reateReadSession\x12?.google.cloud.bigquery.storage.v1beta1.CreateReadSessionRequest\x1a\x32.google.cloud.bigquery.storage.v1beta1.ReadSession"\xa8\x01\x82\xd3\xe4\x93\x02w"0/v1beta1/{table_reference.project_id=projects/*}:\x01*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\x01*\xda\x41(table_reference,parent,requested_streams\x12\xd0\x01\n\x08ReadRows\x12\x36.google.cloud.bigquery.storage.v1beta1.ReadRowsRequest\x1a\x37.google.cloud.bigquery.storage.v1beta1.ReadRowsResponse"Q\x82\xd3\xe4\x93\x02;\x12\x39/v1beta1/{read_position.stream.name=projects/*/streams/*}\xda\x41\rread_position0\x01\x12\x90\x02\n\x1d\x42\x61tchCreateReadSessionStreams\x12K.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsRequest\x1aL.google.cloud.bigquery.storage.v1beta1.BatchCreateReadSessionStreamsResponse"T\x82\xd3\xe4\x93\x02\x32"-/v1beta1/{session.name=projects/*/sessions/*}:\x01*\xda\x41\x19session,requested_streams\x12\xa7\x01\n\x0e\x46inalizeStream\x12<.google.cloud.bigquery.storage.v1beta1.FinalizeStreamRequest\x1a\x16.google.protobuf.Empty"?\x82\xd3\xe4\x93\x02\x30"+/v1beta1/{
stream.name=projects/*/streams/*}:\x01*\xda\x41\x06stream\x12\xe0\x01\n\x0fSplitReadStream\x12=.google.cloud.bigquery.storage.v1beta1.SplitReadStreamRequest\x1a>.google.cloud.bigquery.storage.v1beta1.SplitReadStreamResponse"N\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta1/{original_stream.name=projects/*/streams/*}\xda\x41\x0foriginal_stream\x1a\xae\x01\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41\x89\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platformBy\n)com.google.cloud.bigquery.storage.v1beta1ZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_arrow__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_avro__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_read__options__pb2.DESCRIPTOR, google_dot_cloud_dot_bigquery_dot_storage__v1beta1_dot_proto_dot_table__reference__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -78,8 +82,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2799, - serialized_end=2861, + serialized_start=3227, + serialized_end=3289, ) _sym_db.RegisterEnumDescriptor(_DATAFORMAT) @@ -106,8 +110,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2863, - serialized_end=2942, + serialized_start=3291, + serialized_end=3370, ) _sym_db.RegisterEnumDescriptor(_SHARDINGSTRATEGY) @@ -149,13 +153,15 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + 
"\352Aa\n%bigquerystorage.googleapis.com/Stream\0228projects/{project}/locations/{location}/streams/{stream}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=461, - serialized_end=483, + serialized_start=521, + serialized_end=645, ) @@ -211,8 +217,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=485, - serialized_end=580, + serialized_start=647, + serialized_end=742, ) @@ -371,7 +377,9 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Ah\n*bigquerystorage.googleapis.com/ReadSession\022:projects/{project}/locations/{location}/sessions/{session}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -384,8 +392,8 @@ fields=[], ) ], - serialized_start=583, - serialized_end=1127, + serialized_start=745, + serialized_end=1398, ) @@ -411,7 +419,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -429,7 +437,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -531,8 +539,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1130, - serialized_end=1589, + serialized_start=1401, + serialized_end=1870, ) @@ -558,7 +566,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -570,8 +578,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1591, - serialized_end=1686, + serialized_start=1872, + serialized_end=1972, ) @@ -618,10 +626,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="progress", + full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus.progress", + index=2, + number=4, + 
type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="is_splittable", full_name="google.cloud.bigquery.storage.v1beta1.StreamStatus.is_splittable", - index=2, + index=3, number=3, type=8, cpp_type=7, @@ -645,8 +671,65 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1688, - serialized_end=1781, + serialized_start=1975, + serialized_end=2135, +) + + +_PROGRESS = _descriptor.Descriptor( + name="Progress", + full_name="google.cloud.bigquery.storage.v1beta1.Progress", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="at_response_start", + full_name="google.cloud.bigquery.storage.v1beta1.Progress.at_response_start", + index=0, + number=1, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="at_response_end", + full_name="google.cloud.bigquery.storage.v1beta1.Progress.at_response_end", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2137, + serialized_end=2199, ) @@ -684,8 +767,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1783, - serialized_end=1825, + serialized_start=2201, + serialized_end=2243, ) @@ -803,8 +886,8 
@@ fields=[], ) ], - serialized_start=1828, - serialized_end=2179, + serialized_start=2246, + serialized_end=2597, ) @@ -830,7 +913,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -848,7 +931,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -860,8 +943,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2182, - serialized_end=2316, + serialized_start=2600, + serialized_end=2744, ) @@ -899,8 +982,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2318, - serialized_end=2421, + serialized_start=2746, + serialized_end=2849, ) @@ -938,8 +1021,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2423, - serialized_end=2509, + serialized_start=2851, + serialized_end=2937, ) @@ -995,8 +1078,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2511, - serialized_end=2625, + serialized_start=2939, + serialized_end=3053, ) @@ -1052,8 +1135,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2628, - serialized_end=2797, + serialized_start=3056, + serialized_end=3225, ) _STREAMPOSITION.fields_by_name["stream"].message_type = _STREAM @@ -1114,6 +1197,7 @@ "sharding_strategy" ].enum_type = _SHARDINGSTRATEGY _READROWSREQUEST.fields_by_name["read_position"].message_type = _STREAMPOSITION +_STREAMSTATUS.fields_by_name["progress"].message_type = _PROGRESS _READROWSRESPONSE.fields_by_name[ "avro_rows" ].message_type = ( @@ -1152,6 +1236,7 @@ DESCRIPTOR.message_types_by_name["CreateReadSessionRequest"] = _CREATEREADSESSIONREQUEST DESCRIPTOR.message_types_by_name["ReadRowsRequest"] = _READROWSREQUEST DESCRIPTOR.message_types_by_name["StreamStatus"] = _STREAMSTATUS +DESCRIPTOR.message_types_by_name["Progress"] = _PROGRESS 
DESCRIPTOR.message_types_by_name["ThrottleStatus"] = _THROTTLESTATUS DESCRIPTOR.message_types_by_name["ReadRowsResponse"] = _READROWSRESPONSE DESCRIPTOR.message_types_by_name[ @@ -1263,19 +1348,18 @@ indicating the project this ReadSession is associated with. This is the project that will be billed for usage. table_modifiers: - Optional. Any modifiers to the Table (e.g. snapshot - timestamp). + Any modifiers to the Table (e.g. snapshot timestamp). requested_streams: - Optional. Initial number of streams. If unset or 0, we will - provide a value of streams so as to produce reasonable - throughput. Must be non-negative. The number of streams may be - lower than the requested number, depending on the amount - parallelism that is reasonable for the table and the maximum - amount of parallelism allowed by the system. Streams must be - read starting from offset 0. + Initial number of streams. If unset or 0, we will provide a + value of streams so as to produce reasonable throughput. Must + be non-negative. The number of streams may be lower than the + requested number, depending on the amount parallelism that is + reasonable for the table and the maximum amount of parallelism + allowed by the system. Streams must be read starting from + offset 0. read_options: - Optional. Read options for this session (e.g. column - selection, filters). + Read options for this session (e.g. column selection, + filters). format: Data output format. Currently default to Avro. sharding_strategy: @@ -1331,6 +1415,10 @@ progress through the pre-filtering rows. This value is only populated for sessions created through the BALANCED sharding strategy. + progress: + Represents the progress of the current stream. Note: This + value is under development and should not be used. Use + ``fraction_consumed`` instead. is_splittable: Whether this stream can be split. For sessions that use the LIQUID sharding strategy, this value is always false. 
For @@ -1344,6 +1432,36 @@ ) _sym_db.RegisterMessage(StreamStatus) +Progress = _reflection.GeneratedProtocolMessageType( + "Progress", + (_message.Message,), + dict( + DESCRIPTOR=_PROGRESS, + __module__="google.cloud.bigquery.storage_v1beta1.proto.storage_pb2", + __doc__="""Protocol buffer. + + Attributes: + at_response_start: + The fraction of rows assigned to the stream that have been + processed by the server so far, not including the rows in the + current response message. This value, along with + ``at_response_end``, can be used to interpolate the progress + made as the rows in the message are being processed using the + following formula: ``at_response_start + (at_response_end - + at_response_start) * rows_processed_from_response / + rows_in_response``. Note that if a filter is provided, the + ``at_response_end`` value of the previous response may not + necessarily be equal to the ``at_response_start`` value of the + current response. + at_response_end: + Similar to ``at_response_start``, except that this value + includes the rows in the current response. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.bigquery.storage.v1beta1.Progress) + ), +) +_sym_db.RegisterMessage(Progress) + ThrottleStatus = _reflection.GeneratedProtocolMessageType( "ThrottleStatus", (_message.Message,), @@ -1513,6 +1631,15 @@ DESCRIPTOR._options = None +_STREAM._options = None +_READSESSION._options = None +_CREATEREADSESSIONREQUEST.fields_by_name["table_reference"]._options = None +_CREATEREADSESSIONREQUEST.fields_by_name["parent"]._options = None +_READROWSREQUEST.fields_by_name["read_position"]._options = None +_BATCHCREATEREADSESSIONSTREAMSREQUEST.fields_by_name["session"]._options = None +_BATCHCREATEREADSESSIONSTREAMSREQUEST.fields_by_name[ + "requested_streams" +]._options = None _BIGQUERYSTORAGE = _descriptor.ServiceDescriptor( name="BigQueryStorage", @@ -1520,10 +1647,10 @@ file=DESCRIPTOR, index=0, serialized_options=_b( - "\312A\036bigquerystorage.googleapis.com\322AWhttps://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform" + "\312A\036bigquerystorage.googleapis.com\322A\211\001https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.readonly,https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=2945, - serialized_end=4165, + serialized_start=3373, + serialized_end=4760, methods=[ _descriptor.MethodDescriptor( name="CreateReadSession", @@ -1533,7 +1660,7 @@ input_type=_CREATEREADSESSIONREQUEST, output_type=_READSESSION, serialized_options=_b( - '\202\323\344\223\002w"0/v1beta1/{table_reference.project_id=projects/*}:\001*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\001*' + '\202\323\344\223\002w"0/v1beta1/{table_reference.project_id=projects/*}:\001*Z@";/v1beta1/{table_reference.dataset_id=projects/*/datasets/*}:\001*\332A(table_reference,parent,requested_streams' ), ), _descriptor.MethodDescriptor( @@ -1544,7 +1671,7 @@ input_type=_READROWSREQUEST, output_type=_READROWSRESPONSE, serialized_options=_b( - 
"\202\323\344\223\002;\0229/v1beta1/{read_position.stream.name=projects/*/streams/*}" + "\202\323\344\223\002;\0229/v1beta1/{read_position.stream.name=projects/*/streams/*}\332A\rread_position" ), ), _descriptor.MethodDescriptor( @@ -1555,7 +1682,7 @@ input_type=_BATCHCREATEREADSESSIONSTREAMSREQUEST, output_type=_BATCHCREATEREADSESSIONSTREAMSRESPONSE, serialized_options=_b( - '\202\323\344\223\0022"-/v1beta1/{session.name=projects/*/sessions/*}:\001*' + '\202\323\344\223\0022"-/v1beta1/{session.name=projects/*/sessions/*}:\001*\332A\031session,requested_streams' ), ), _descriptor.MethodDescriptor( @@ -1566,7 +1693,7 @@ input_type=_FINALIZESTREAMREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\0020"+/v1beta1/{stream.name=projects/*/streams/*}:\001*' + '\202\323\344\223\0020"+/v1beta1/{stream.name=projects/*/streams/*}:\001*\332A\006stream' ), ), _descriptor.MethodDescriptor( @@ -1577,7 +1704,7 @@ input_type=_SPLITREADSTREAMREQUEST, output_type=_SPLITREADSTREAMRESPONSE, serialized_options=_b( - "\202\323\344\223\0026\0224/v1beta1/{original_stream.name=projects/*/streams/*}" + "\202\323\344\223\0026\0224/v1beta1/{original_stream.name=projects/*/streams/*}\332A\017original_stream" ), ), ], diff --git a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto index fb74bbf6c642..a55dc48eb023 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.bigquery.storage.v1beta1; +import "google/api/resource.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage"; diff --git 
a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py index 30c85aa2f469..992067f07367 100644 --- a/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py +++ b/bigquery_storage/google/cloud/bigquery_storage_v1beta1/proto/table_reference_pb2.py @@ -15,6 +15,7 @@ _sym_db = _symbol_database.Default() +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -26,9 +27,12 @@ "\n)com.google.cloud.bigquery.storage.v1beta1B\023TableReferenceProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storage" ), serialized_pb=_b( - '\nAgoogle/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto"J\n\x0eTableReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08table_id\x18\x03 \x01(\t"C\n\x0eTableModifiers\x12\x31\n\rsnapshot_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x8e\x01\n)com.google.cloud.bigquery.storage.v1beta1B\x13TableReferenceProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' + '\nAgoogle/cloud/bigquery/storage_v1beta1/proto/table_reference.proto\x12%google.cloud.bigquery.storage.v1beta1\x1a\x19google/api/resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto"J\n\x0eTableReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08table_id\x18\x03 \x01(\t"C\n\x0eTableModifiers\x12\x31\n\rsnapshot_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x8e\x01\n)com.google.cloud.bigquery.storage.v1beta1B\x13TableReferenceProtoZLgoogle.golang.org/genproto/googleapis/cloud/bigquery/storage/v1beta1;storageb\x06proto3' ), - 
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], ) @@ -102,8 +106,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=141, - serialized_end=215, + serialized_start=168, + serialized_end=242, ) @@ -141,8 +145,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=217, - serialized_end=284, + serialized_start=244, + serialized_end=311, ) _TABLEMODIFIERS.fields_by_name[ diff --git a/bigquery_storage/synth.metadata b/bigquery_storage/synth.metadata index 5695e76c7e81..e23a8d443a90 100644 --- a/bigquery_storage/synth.metadata +++ b/bigquery_storage/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-21T23:20:45.275738Z", + "updateTime": "2019-09-27T14:19:13.569391Z", "sources": [ { "generator": { "name": "artman", - "version": "0.34.0", - "dockerImage": "googleapis/artman@sha256:38a27ba6245f96c3e86df7acb2ebcc33b4f186d9e475efe2d64303aec3d4e0ea" + "version": "0.37.1", + "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" } }, { "git": { "name": "googleapis", - "remote": "git@github.com:googleapis/googleapis.git", - "sha": "92bebf78345af8b2d3585220527115bda8bdedf8", - "internalRef": "264715111" + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "cd112d8d255e0099df053643d4bd12c228ef7b1b", + "internalRef": "271468707" } }, { diff --git a/bigquery_storage/synth.py b/bigquery_storage/synth.py index cbbeffd02acf..0866ae8eeb98 100644 --- a/bigquery_storage/synth.py +++ b/bigquery_storage/synth.py @@ -117,6 +117,13 @@ r"google.api_core.grpc_helpers.create_channel\(", "google.api_core.grpc_helpers.create_channel( # pragma: no cover", ) + +# Fix up proto docs that are missing summary line. 
+s.replace( + "google/cloud/bigquery_storage_v1beta1/proto/storage_pb2.py", + '"""Attributes:', + '"""Protocol buffer.\n\n Attributes:', +) # END: Ignore lint and coverage # ---------------------------------------------------------------------------- diff --git a/bigtable/CHANGELOG.md b/bigtable/CHANGELOG.md index 80eaff1617b3..70e61063c488 100644 --- a/bigtable/CHANGELOG.md +++ b/bigtable/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-bigtable/#history +## 1.1.0 + +10-15-2019 06:40 PDT + + +### New Features +- Add IAM Policy methods to table admin client (via synth). ([#9172](https://github.com/googleapis/google-cloud-python/pull/9172)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix misspelling in docs. ([#9184](https://github.com/googleapis/google-cloud-python/pull/9184)) + ## 1.0.0 08-28-2019 12:49 PDT diff --git a/bigtable/google/cloud/bigtable/client.py b/bigtable/google/cloud/bigtable/client.py index f9a625b15843..8a8315623cae 100644 --- a/bigtable/google/cloud/bigtable/client.py +++ b/bigtable/google/cloud/bigtable/client.py @@ -60,11 +60,13 @@ """Scope for reading table data.""" -def _create_gapic_client(client_class): +def _create_gapic_client(client_class, client_options=None): def inner(self): if self._emulator_host is None: return client_class( - credentials=self._credentials, client_info=self._client_info + credentials=self._credentials, + client_info=self._client_info, + client_options=client_options, ) else: return client_class( @@ -109,6 +111,17 @@ class Client(ClientWithProject): you only need to set this if you're developing your own library or partner tool. 
+ :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. + + :type admin_client_options: + :class:`~google.api_core.client_options.ClientOptions` or :class:`dict` + :param admin_client_options: (Optional) Client options used to set user + options on the client. API Endpoint for admin operations should be set + through admin_client_options. + :type channel: :instance: grpc.Channel :param channel (grpc.Channel): (Optional) DEPRECATED: A ``Channel`` instance through which to make calls. @@ -130,6 +143,8 @@ def __init__( read_only=False, admin=False, client_info=_CLIENT_INFO, + client_options=None, + admin_client_options=None, channel=None, ): if read_only and admin: @@ -155,6 +170,8 @@ def __init__( stacklevel=2, ) + self._client_options = client_options + self._admin_client_options = admin_client_options self._channel = channel self.SCOPE = self._get_scopes() super(Client, self).__init__(project=project, credentials=credentials) @@ -213,9 +230,10 @@ def table_data_client(self): :returns: A BigtableClient object. 
""" if self._table_data_client is None: - self._table_data_client = _create_gapic_client(bigtable_v2.BigtableClient)( - self + klass = _create_gapic_client( + bigtable_v2.BigtableClient, client_options=self._client_options ) + self._table_data_client = klass(self) return self._table_data_client @property @@ -237,9 +255,11 @@ def table_admin_client(self): if self._table_admin_client is None: if not self._admin: raise ValueError("Client is not an admin client.") - self._table_admin_client = _create_gapic_client( - bigtable_admin_v2.BigtableTableAdminClient - )(self) + klass = _create_gapic_client( + bigtable_admin_v2.BigtableTableAdminClient, + client_options=self._admin_client_options, + ) + self._table_admin_client = klass(self) return self._table_admin_client @property @@ -261,9 +281,11 @@ def instance_admin_client(self): if self._instance_admin_client is None: if not self._admin: raise ValueError("Client is not an admin client.") - self._instance_admin_client = _create_gapic_client( - bigtable_admin_v2.BigtableInstanceAdminClient - )(self) + klass = _create_gapic_client( + bigtable_admin_v2.BigtableInstanceAdminClient, + client_options=self._admin_client_options, + ) + self._instance_admin_client = klass(self) return self._instance_admin_client def instance(self, instance_id, display_name=None, instance_type=None, labels=None): diff --git a/bigtable/google/cloud/bigtable_admin_v2/__init__.py b/bigtable/google/cloud/bigtable_admin_v2/__init__.py index 501d8f24d3e1..021abe2ce82b 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/__init__.py +++ b/bigtable/google/cloud/bigtable_admin_v2/__init__.py @@ -35,4 +35,9 @@ class BigtableTableAdminClient(bigtable_table_admin_client.BigtableTableAdminCli enums = enums -__all__ = ("enums", "types", "BigtableInstanceAdminClient", "BigtableTableAdminClient") +__all__ = ( + "enums", + "types", + "BigtableInstanceAdminClient", + "BigtableTableAdminClient", +) diff --git 
a/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py b/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py index fed633c8dc6b..c0bac0768dcf 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py +++ b/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_instance_admin_client.py @@ -49,7 +49,9 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-bigtable").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-bigtable", +).version class BigtableInstanceAdminClient(object): @@ -128,7 +130,7 @@ def location_path(cls, project, location): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__( @@ -218,12 +220,12 @@ def __init__( self.transport = transport else: self.transport = bigtable_instance_admin_grpc_transport.BigtableInstanceAdminGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -234,7 +236,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -333,7 +335,10 @@ def create_instance( ) request = bigtable_instance_admin_pb2.CreateInstanceRequest( - parent=parent, instance_id=instance_id, instance=instance, clusters=clusters + parent=parent, + instance_id=instance_id, + instance=instance, + clusters=clusters, ) if metadata is None: metadata = [] @@ -410,7 +415,7 @@ def get_instance( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.GetInstanceRequest(name=name) + request = bigtable_instance_admin_pb2.GetInstanceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -483,7 +488,7 @@ def list_instances( ) request = bigtable_instance_admin_pb2.ListInstancesRequest( - parent=parent, page_token=page_token + parent=parent, page_token=page_token, ) if metadata is None: metadata = [] @@ -586,7 +591,11 @@ def update_instance( ) request = instance_pb2.Instance( - name=name, display_name=display_name, type=type_, labels=labels, state=state + name=name, + display_name=display_name, + type=type_, + labels=labels, + state=state, ) if metadata is None: metadata = [] @@ -679,7 +688,7 @@ def partial_update_instance( ) request = bigtable_instance_admin_pb2.PartialUpdateInstanceRequest( - instance=instance, update_mask=update_mask + instance=instance, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -753,7 +762,7 @@ def delete_instance( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.DeleteInstanceRequest(name=name) + request = bigtable_instance_admin_pb2.DeleteInstanceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -849,7 +858,7 @@ def create_cluster( ) request = bigtable_instance_admin_pb2.CreateClusterRequest( - parent=parent, cluster_id=cluster_id, cluster=cluster + parent=parent, cluster_id=cluster_id, cluster=cluster, ) if metadata is None: metadata = [] @@ -926,7 +935,7 @@ def get_cluster( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.GetClusterRequest(name=name) 
+ request = bigtable_instance_admin_pb2.GetClusterRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1002,7 +1011,7 @@ def list_clusters( ) request = bigtable_instance_admin_pb2.ListClustersRequest( - parent=parent, page_token=page_token + parent=parent, page_token=page_token, ) if metadata is None: metadata = [] @@ -1177,7 +1186,7 @@ def delete_cluster( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.DeleteClusterRequest(name=name) + request = bigtable_instance_admin_pb2.DeleteClusterRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1340,7 +1349,7 @@ def get_app_profile( client_info=self._client_info, ) - request = bigtable_instance_admin_pb2.GetAppProfileRequest(name=name) + request = bigtable_instance_admin_pb2.GetAppProfileRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1432,7 +1441,7 @@ def list_app_profiles( ) request = bigtable_instance_admin_pb2.ListAppProfilesRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -1620,7 +1629,7 @@ def delete_app_profile( ) request = bigtable_instance_admin_pb2.DeleteAppProfileRequest( - name=name, ignore_warnings=ignore_warnings + name=name, ignore_warnings=ignore_warnings, ) if metadata is None: metadata = [] @@ -1699,7 +1708,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -1782,7 +1791,7 @@ def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1861,7 +1870,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, 
permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] diff --git a/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py b/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py index 96026779dbf1..bdc3f1a88749 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py +++ b/bigtable/google/cloud/bigtable_admin_v2/gapic/bigtable_table_admin_client.py @@ -54,7 +54,9 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-bigtable").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-bigtable", +).version class BigtableTableAdminClient(object): @@ -220,12 +222,12 @@ def __init__( self.transport = transport else: self.transport = bigtable_table_admin_grpc_transport.BigtableTableAdminGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -236,7 +238,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -337,7 +339,10 @@ def create_table( ) request = bigtable_table_admin_pb2.CreateTableRequest( - parent=parent, table_id=table_id, table=table, initial_splits=initial_splits + parent=parent, + table_id=table_id, + table=table, + initial_splits=initial_splits, ) if metadata is None: metadata = [] @@ -439,7 +444,7 @@ def create_table_from_snapshot( ) request = bigtable_table_admin_pb2.CreateTableFromSnapshotRequest( - parent=parent, table_id=table_id, source_snapshot=source_snapshot + parent=parent, table_id=table_id, source_snapshot=source_snapshot, ) if metadata is None: metadata = [] @@ -538,7 +543,7 @@ def list_tables( ) request = bigtable_table_admin_pb2.ListTablesRequest( - parent=parent, view=view, page_size=page_size + parent=parent, view=view, page_size=page_size, ) if metadata is None: metadata = [] @@ -623,7 +628,7 @@ def get_table( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.GetTableRequest(name=name, view=view) + request = bigtable_table_admin_pb2.GetTableRequest(name=name, view=view,) if metadata is None: metadata = [] metadata = list(metadata) @@ -690,7 +695,7 @@ def delete_table( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.DeleteTableRequest(name=name) + request = bigtable_table_admin_pb2.DeleteTableRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -776,7 +781,7 @@ def modify_column_families( ) request = bigtable_table_admin_pb2.ModifyColumnFamiliesRequest( - name=name, modifications=modifications + name=name, modifications=modifications, ) if metadata is None: metadata = [] @@ -939,7 +944,7 @@ def generate_consistency_token( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.GenerateConsistencyTokenRequest(name=name) + request = bigtable_table_admin_pb2.GenerateConsistencyTokenRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1018,7 +1023,7 @@ def check_consistency( ) request = 
bigtable_table_admin_pb2.CheckConsistencyRequest( - name=name, consistency_token=consistency_token + name=name, consistency_token=consistency_token, ) if metadata is None: metadata = [] @@ -1097,7 +1102,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -1180,7 +1185,7 @@ def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1259,7 +1264,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] @@ -1462,7 +1467,7 @@ def get_snapshot( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.GetSnapshotRequest(name=name) + request = bigtable_table_admin_pb2.GetSnapshotRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1563,7 +1568,7 @@ def list_snapshots( ) request = bigtable_table_admin_pb2.ListSnapshotsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -1648,7 +1653,7 @@ def delete_snapshot( client_info=self._client_info, ) - request = bigtable_table_admin_pb2.DeleteSnapshotRequest(name=name) + request = bigtable_table_admin_pb2.DeleteSnapshotRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py b/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py index afb72e0c8ab9..3482193864b1 100644 --- 
a/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py +++ b/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_instance_admin_grpc_transport.py @@ -64,7 +64,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -85,7 +85,7 @@ def __init__( self._stubs = { "bigtable_instance_admin_stub": bigtable_instance_admin_pb2_grpc.BigtableInstanceAdminStub( channel - ) + ), } # Because this API includes a method that returns a diff --git a/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_table_admin_grpc_transport.py b/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_table_admin_grpc_transport.py index 5d93e555b3b9..08e70e48b31b 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_table_admin_grpc_transport.py +++ b/bigtable/google/cloud/bigtable_admin_v2/gapic/transports/bigtable_table_admin_grpc_transport.py @@ -64,7 +64,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. 
@@ -85,7 +85,7 @@ def __init__( self._stubs = { "bigtable_table_admin_stub": bigtable_table_admin_pb2_grpc.BigtableTableAdminStub( channel - ) + ), } # Because this API includes a method that returns a diff --git a/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_instance_admin_pb2.py b/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_instance_admin_pb2.py index 01d3fa7e3a4d..5f0601ac2026 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_instance_admin_pb2.py +++ b/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_instance_admin_pb2.py @@ -189,7 +189,7 @@ ), ], extensions=[], - nested_types=[_CREATEINSTANCEREQUEST_CLUSTERSENTRY], + nested_types=[_CREATEINSTANCEREQUEST_CLUSTERSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -225,7 +225,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -453,7 +453,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -567,7 +567,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -738,7 +738,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1170,7 +1170,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_table_admin_pb2.py b/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_table_admin_pb2.py index c81637a34f25..f2a95d546ac3 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_table_admin_pb2.py +++ b/bigtable/google/cloud/bigtable_admin_v2/proto/bigtable_table_admin_pb2.py @@ -76,7 +76,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -171,7 +171,7 @@ ), ], extensions=[], - 
nested_types=[_CREATETABLEREQUEST_SPLIT], + nested_types=[_CREATETABLEREQUEST_SPLIT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -334,7 +334,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=660, serialized_end=769, @@ -572,7 +572,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -681,7 +681,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1254, serialized_end=1419, @@ -732,7 +732,7 @@ ), ], extensions=[], - nested_types=[_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION], + nested_types=[_MODIFYCOLUMNFAMILIESREQUEST_MODIFICATION,], enum_types=[], serialized_options=None, is_extendable=False, @@ -768,7 +768,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -807,7 +807,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -903,7 +903,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1053,7 +1053,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1224,7 +1224,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/bigtable/google/cloud/bigtable_admin_v2/proto/instance_pb2.py b/bigtable/google/cloud/bigtable_admin_v2/proto/instance_pb2.py index 49164dfe6693..ef3a7ce7858b 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/proto/instance_pb2.py +++ b/bigtable/google/cloud/bigtable_admin_v2/proto/instance_pb2.py @@ -281,8 +281,8 @@ ), ], extensions=[], - nested_types=[_INSTANCE_LABELSENTRY], - enum_types=[_INSTANCE_STATE, _INSTANCE_TYPE], + nested_types=[_INSTANCE_LABELSENTRY,], + enum_types=[_INSTANCE_STATE, _INSTANCE_TYPE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -393,7 +393,7 @@ ], 
extensions=[], nested_types=[], - enum_types=[_CLUSTER_STATE], + enum_types=[_CLUSTER_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -594,7 +594,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=826, serialized_end=1212, diff --git a/bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py b/bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py index e15dd2ba5b3f..c348fe4a280f 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py +++ b/bigtable/google/cloud/bigtable_admin_v2/proto/table_pb2.py @@ -192,11 +192,11 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], - enum_types=[_TABLE_CLUSTERSTATE_REPLICATIONSTATE], + enum_types=[_TABLE_CLUSTERSTATE_REPLICATIONSTATE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -404,7 +404,7 @@ _TABLE_CLUSTERSTATESENTRY, _TABLE_COLUMNFAMILIESENTRY, ], - enum_types=[_TABLE_TIMESTAMPGRANULARITY, _TABLE_VIEW], + enum_types=[_TABLE_TIMESTAMPGRANULARITY, _TABLE_VIEW,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -439,7 +439,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -478,7 +478,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -516,7 +516,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -611,7 +611,7 @@ ), ], extensions=[], - nested_types=[_GCRULE_INTERSECTION, _GCRULE_UNION], + nested_types=[_GCRULE_INTERSECTION, _GCRULE_UNION,], enum_types=[], serialized_options=None, is_extendable=False, @@ -624,7 +624,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1087, serialized_end=1428, @@ -767,7 +767,7 @@ ], extensions=[], nested_types=[], - enum_types=[_SNAPSHOT_STATE], + enum_types=[_SNAPSHOT_STATE,], serialized_options=None, 
is_extendable=False, syntax="proto3", diff --git a/bigtable/google/cloud/bigtable_v2/__init__.py b/bigtable/google/cloud/bigtable_v2/__init__.py index ca18668ce49b..216ef8fb1daa 100644 --- a/bigtable/google/cloud/bigtable_v2/__init__.py +++ b/bigtable/google/cloud/bigtable_v2/__init__.py @@ -25,4 +25,7 @@ class BigtableClient(bigtable_client.BigtableClient): __doc__ = bigtable_client.BigtableClient.__doc__ -__all__ = ("types", "BigtableClient") +__all__ = ( + "types", + "BigtableClient", +) diff --git a/bigtable/google/cloud/bigtable_v2/gapic/bigtable_client.py b/bigtable/google/cloud/bigtable_v2/gapic/bigtable_client.py index 36021068dfd8..b13faac448c1 100644 --- a/bigtable/google/cloud/bigtable_v2/gapic/bigtable_client.py +++ b/bigtable/google/cloud/bigtable_v2/gapic/bigtable_client.py @@ -36,7 +36,9 @@ from google.cloud.bigtable_v2.proto import data_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-bigtable").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-bigtable", +).version class BigtableClient(object): @@ -166,12 +168,12 @@ def __init__( self.transport = transport else: self.transport = bigtable_grpc_transport.BigtableGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -182,7 +184,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -352,7 +354,7 @@ def sample_row_keys( ) request = bigtable_pb2.SampleRowKeysRequest( - table_name=table_name, app_profile_id=app_profile_id + table_name=table_name, app_profile_id=app_profile_id, ) if metadata is None: metadata = [] @@ -537,7 +539,7 @@ def mutate_rows( ) request = bigtable_pb2.MutateRowsRequest( - table_name=table_name, entries=entries, app_profile_id=app_profile_id + table_name=table_name, entries=entries, app_profile_id=app_profile_id, ) if metadata is None: metadata = [] diff --git a/bigtable/google/cloud/bigtable_v2/gapic/transports/bigtable_grpc_transport.py b/bigtable/google/cloud/bigtable_v2/gapic/transports/bigtable_grpc_transport.py index 4c34d5fb1b39..3c30df704a57 100644 --- a/bigtable/google/cloud/bigtable_v2/gapic/transports/bigtable_grpc_transport.py +++ b/bigtable/google/cloud/bigtable_v2/gapic/transports/bigtable_grpc_transport.py @@ -60,7 +60,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -78,7 +78,9 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. 
- self._stubs = {"bigtable_stub": bigtable_pb2_grpc.BigtableStub(channel)} + self._stubs = { + "bigtable_stub": bigtable_pb2_grpc.BigtableStub(channel), + } @classmethod def create_channel( diff --git a/bigtable/google/cloud/bigtable_v2/proto/bigtable_pb2.py b/bigtable/google/cloud/bigtable_v2/proto/bigtable_pb2.py index 1c2b0f1ae134..4e4ab84e1cc8 100644 --- a/bigtable/google/cloud/bigtable_v2/proto/bigtable_pb2.py +++ b/bigtable/google/cloud/bigtable_v2/proto/bigtable_pb2.py @@ -337,7 +337,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=488, serialized_end=749, @@ -388,7 +388,7 @@ ), ], extensions=[], - nested_types=[_READROWSRESPONSE_CELLCHUNK], + nested_types=[_READROWSRESPONSE_CELLCHUNK,], enum_types=[], serialized_options=None, is_extendable=False, @@ -746,7 +746,7 @@ ), ], extensions=[], - nested_types=[_MUTATEROWSREQUEST_ENTRY], + nested_types=[_MUTATEROWSREQUEST_ENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -838,10 +838,10 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_MUTATEROWSRESPONSE_ENTRY], + nested_types=[_MUTATEROWSRESPONSE_ENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1006,7 +1006,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1138,7 +1138,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/bigtable/google/cloud/bigtable_v2/proto/data_pb2.py b/bigtable/google/cloud/bigtable_v2/proto/data_pb2.py index 8e5cff816455..825a0fa9222f 100644 --- a/bigtable/google/cloud/bigtable_v2/proto/data_pb2.py +++ b/bigtable/google/cloud/bigtable_v2/proto/data_pb2.py @@ -754,7 +754,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -792,7 +792,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), 
], extensions=[], nested_types=[], @@ -1231,7 +1231,7 @@ ), ], extensions=[], - nested_types=[_ROWFILTER_CHAIN, _ROWFILTER_INTERLEAVE, _ROWFILTER_CONDITION], + nested_types=[_ROWFILTER_CHAIN, _ROWFILTER_INTERLEAVE, _ROWFILTER_CONDITION,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1244,7 +1244,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=991, serialized_end=2110, @@ -1441,7 +1441,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1573,7 +1573,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=2113, serialized_end=2698, @@ -1674,7 +1674,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=2701, serialized_end=2829, diff --git a/bigtable/google/cloud/bigtable_v2/types.py b/bigtable/google/cloud/bigtable_v2/types.py index 53937c1d1687..a445eae1cade 100644 --- a/bigtable/google/cloud/bigtable_v2/types.py +++ b/bigtable/google/cloud/bigtable_v2/types.py @@ -27,9 +27,16 @@ from google.rpc import status_pb2 -_shared_modules = [any_pb2, wrappers_pb2, status_pb2] - -_local_modules = [bigtable_pb2, data_pb2] +_shared_modules = [ + any_pb2, + wrappers_pb2, + status_pb2, +] + +_local_modules = [ + bigtable_pb2, + data_pb2, +] names = [] diff --git a/bigtable/setup.py b/bigtable/setup.py index 26956b393471..82c3aa499dcd 100644 --- a/bigtable/setup.py +++ b/bigtable/setup.py @@ -22,15 +22,15 @@ name = 'google-cloud-bigtable' description = 'Google Cloud Bigtable API client library' -version = '1.0.0' +version = '1.1.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-api-core[grpc] >= 1.14.0, < 2.0.0dev', - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", 
"grpc-google-iam-v1 >= 0.12.3, < 0.13dev", ] extras = { diff --git a/bigtable/synth.metadata b/bigtable/synth.metadata index a7291727fa94..25c44a96331d 100644 --- a/bigtable/synth.metadata +++ b/bigtable/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-09-04T12:14:03.458374Z", + "updateTime": "2019-10-29T12:15:54.915199Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.2", - "dockerImage": "googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a2158681f6e30c5fd9446eb1fd7b5021a6d48bfa", - "internalRef": "266999433" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 28d95d985ffe..ae43bb10ecdf 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -1076,8 +1076,8 @@ def test_read_with_label_applied(self): # Make sure COLUMN_FAMILY_ID1 was the only key. self.assertEqual(len(cells_returned), 0) - cell1_new, = col_fam1.pop(COL_NAME1) - cell3_new, = col_fam1.pop(COL_NAME2) + (cell1_new,) = col_fam1.pop(COL_NAME1) + (cell3_new,) = col_fam1.pop(COL_NAME2) # Make sure COL_NAME1 and COL_NAME2 were the only keys. 
self.assertEqual(len(col_fam1), 0) diff --git a/bigtable/tests/unit/test_client.py b/bigtable/tests/unit/test_client.py index 05a017d898af..8a2ef3c64b56 100644 --- a/bigtable/tests/unit/test_client.py +++ b/bigtable/tests/unit/test_client.py @@ -21,12 +21,12 @@ class Test__create_gapic_client(unittest.TestCase): - def _invoke_client_factory(self, client_class): + def _invoke_client_factory(self, client_class, **kw): from google.cloud.bigtable.client import _create_gapic_client - return _create_gapic_client(client_class) + return _create_gapic_client(client_class, **kw) - def test_without_emulator(self): + def test_wo_emulator(self): client_class = mock.Mock() credentials = _make_credentials() client = _Client(credentials) @@ -36,10 +36,30 @@ def test_without_emulator(self): self.assertIs(result, client_class.return_value) client_class.assert_called_once_with( - credentials=client._credentials, client_info=client_info + credentials=client._credentials, + client_info=client_info, + client_options=None, ) - def test_with_emulator(self): + def test_wo_emulator_w_client_options(self): + client_class = mock.Mock() + credentials = _make_credentials() + client = _Client(credentials) + client_info = client._client_info = mock.Mock() + client_options = mock.Mock() + + result = self._invoke_client_factory( + client_class, client_options=client_options + )(client) + + self.assertIs(result, client_class.return_value) + client_class.assert_called_once_with( + credentials=client._credentials, + client_info=client_info, + client_options=client_options, + ) + + def test_w_emulator(self): client_class = mock.Mock() emulator_host = emulator_channel = object() credentials = _make_credentials() @@ -210,6 +230,25 @@ def test_table_data_client_not_initialized_w_client_info(self): self.assertIs(table_data_client._client_info, client_info) self.assertIs(client._table_data_client, table_data_client) + def test_table_data_client_not_initialized_w_client_options(self): + credentials = 
_make_credentials() + client_options = mock.Mock() + client = self._make_one( + project=self.PROJECT, credentials=credentials, client_options=client_options + ) + + patch = mock.patch("google.cloud.bigtable_v2.BigtableClient") + with patch as mocked: + table_data_client = client.table_data_client + + self.assertIs(table_data_client, mocked.return_value) + self.assertIs(client._table_data_client, table_data_client) + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=mock.ANY, # added scopes + client_options=client_options, + ) + def test_table_data_client_initialized(self): credentials = _make_credentials() client = self._make_one( @@ -257,6 +296,28 @@ def test_table_admin_client_not_initialized_w_client_info(self): self.assertIs(table_admin_client._client_info, client_info) self.assertIs(client._table_admin_client, table_admin_client) + def test_table_admin_client_not_initialized_w_client_options(self): + credentials = _make_credentials() + admin_client_options = mock.Mock() + client = self._make_one( + project=self.PROJECT, + credentials=credentials, + admin=True, + admin_client_options=admin_client_options, + ) + + patch = mock.patch("google.cloud.bigtable_admin_v2.BigtableTableAdminClient") + with patch as mocked: + table_admin_client = client.table_admin_client + + self.assertIs(table_admin_client, mocked.return_value) + self.assertIs(client._table_admin_client, table_admin_client) + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=mock.ANY, # added scopes + client_options=admin_client_options, + ) + def test_table_admin_client_initialized(self): credentials = _make_credentials() client = self._make_one( @@ -287,7 +348,7 @@ def test_instance_admin_client_not_initialized_w_admin_flag(self): self.assertIs(instance_admin_client._client_info, _CLIENT_INFO) self.assertIs(client._instance_admin_client, instance_admin_client) - def test_instance_admin_client_not_initialized_w_admin_and_client_info(self): + 
def test_instance_admin_client_not_initialized_w_client_info(self): from google.cloud.bigtable_admin_v2 import BigtableInstanceAdminClient credentials = _make_credentials() @@ -304,6 +365,28 @@ def test_instance_admin_client_not_initialized_w_admin_and_client_info(self): self.assertIs(instance_admin_client._client_info, client_info) self.assertIs(client._instance_admin_client, instance_admin_client) + def test_instance_admin_client_not_initialized_w_client_options(self): + credentials = _make_credentials() + admin_client_options = mock.Mock() + client = self._make_one( + project=self.PROJECT, + credentials=credentials, + admin=True, + admin_client_options=admin_client_options, + ) + + patch = mock.patch("google.cloud.bigtable_admin_v2.BigtableInstanceAdminClient") + with patch as mocked: + instance_admin_client = client.instance_admin_client + + self.assertIs(instance_admin_client, mocked.return_value) + self.assertIs(client._instance_admin_client, instance_admin_client) + mocked.assert_called_once_with( + client_info=client._client_info, + credentials=mock.ANY, # added scopes + client_options=admin_client_options, + ) + def test_instance_admin_client_initialized(self): credentials = _make_credentials() client = self._make_one( diff --git a/containeranalysis/docs/conf.py b/containeranalysis/docs/conf.py index 80504154f98f..142b347b9169 100644 --- a/containeranalysis/docs/conf.py +++ b/containeranalysis/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/containeranalysis/synth.metadata 
b/containeranalysis/synth.metadata index 1edc47ed9c3d..78fb1087f318 100644 --- a/containeranalysis/synth.metadata +++ b/containeranalysis/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:18:03.197749Z", + "updateTime": "2019-10-05T12:17:24.587398Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/datacatalog/CHANGELOG.md b/datacatalog/CHANGELOG.md index f8cce1a5f3f5..7ae6b349db50 100644 --- a/datacatalog/CHANGELOG.md +++ b/datacatalog/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-datacatalog/#history +## 0.4.0 + +10-23-2019 08:54 PDT + +### Implementation Changes + +- remove send/recv msg size limit (via synth) ([#8949](https://github.com/googleapis/google-cloud-python/pull/8949)) + +### New Features + +- add entry group operations ([#9520](https://github.com/googleapis/google-cloud-python/pull/9520)) + +### Documentation + +- fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- remove unused import from samples (via synth). ([#9110](https://github.com/googleapis/google-cloud-python/pull/9110)) +- remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- update intersphinx mapping for requests. 
([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- add 'search' sample (via synth). ([#8793](https://github.com/googleapis/google-cloud-python/pull/8793)) + ## 0.3.0 07-24-2019 15:58 PDT diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py index 3be193c2d2f0..cea50cd34f7f 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client.py @@ -96,6 +96,16 @@ def entry_path(cls, project, location, entry_group, entry): entry=entry, ) + @classmethod + def entry_group_path(cls, project, location, entry_group): + """Return a fully-qualified entry_group string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/entryGroups/{entry_group}", + project=project, + location=location, + entry_group=entry_group, + ) + @classmethod def field_path(cls, project, location, tag_template, field): """Return a fully-qualified field string.""" @@ -268,7 +278,7 @@ def search_catalog( This is a custom method (https://cloud.google.com/apis/design/custom\_methods) and does not return the complete resource, only the resource identifier and high - level fields. Clients can subsequentally call Get methods. + level fields. Clients can subsequentally call ``Get`` methods. Note that searches do not have full recall. There may be results that match your query but are not returned, even in subsequent pages of @@ -330,15 +340,13 @@ def search_catalog( order_by (str): Specifies the ordering of results, currently supported case-sensitive choices are: - .. raw:: html + - ``relevance``, only supports desecending + - ``last_access_timestamp [asc|desc]``, defaults to descending if not + specified + - ``last_modified_timestamp [asc|desc]``, defaults to descending if not + specified -
    -
  • relevance
  • -
  • last_access_timestamp [asc|desc], defaults to descending if not - specified,
  • -
  • last_modified_timestamp [asc|desc], defaults to descending if not - specified.
  • -
+ If not specified, defaults to ``relevance`` descending. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -390,6 +398,338 @@ def search_catalog( ) return iterator + def create_entry_group( + self, + parent, + entry_group_id, + entry_group, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. Creates an EntryGroup. The user should enable the Data + Catalog API in the project identified by the ``parent`` parameter (see + [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `entry_group_id`: + >>> entry_group_id = '' + >>> + >>> # TODO: Initialize `entry_group`: + >>> entry_group = {} + >>> + >>> response = client.create_entry_group(parent, entry_group_id, entry_group) + + Args: + parent (str): Required. The name of the project this entry group is in. Example: + + - projects/{project\_id}/locations/{location} + + Note that this EntryGroup and its child resources may not actually be + stored in the location in this name. + entry_group_id (str): Required. The id of the entry group to create. + entry_group (Union[dict, ~google.cloud.datacatalog_v1beta1.types.EntryGroup]): The entry group to create. Defaults to an empty entry group. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datacatalog_v1beta1.types.EntryGroup` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datacatalog_v1beta1.types.EntryGroup` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_entry_group" not in self._inner_api_calls: + self._inner_api_calls[ + "create_entry_group" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_entry_group, + default_retry=self._method_configs["CreateEntryGroup"].retry, + default_timeout=self._method_configs["CreateEntryGroup"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.CreateEntryGroupRequest( + parent=parent, entry_group_id=entry_group_id, entry_group=entry_group + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["create_entry_group"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_entry_group( + self, + name, + read_mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. + Gets an EntryGroup. 
+ + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> name = client.entry_group_path('[PROJECT]', '[LOCATION]', '[ENTRY_GROUP]') + >>> + >>> response = client.get_entry_group(name) + + Args: + name (str): Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + read_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The fields to return. If not set or empty, all fields are returned. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datacatalog_v1beta1.types.EntryGroup` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "get_entry_group" not in self._inner_api_calls: + self._inner_api_calls[ + "get_entry_group" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_entry_group, + default_retry=self._method_configs["GetEntryGroup"].retry, + default_timeout=self._method_configs["GetEntryGroup"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.GetEntryGroupRequest(name=name, read_mask=read_mask) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_entry_group"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_entry_group( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. Deletes an EntryGroup. Only entry groups that do not + contain entries can be deleted. The user should enable the Data Catalog + API in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] (/data-catalog/docs/concepts/resource-project) + for more information). + + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> name = client.entry_group_path('[PROJECT]', '[LOCATION]', '[ENTRY_GROUP]') + >>> + >>> client.delete_entry_group(name) + + Args: + name (str): Required. The name of the entry group. For example, + ``projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_entry_group" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_entry_group" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_entry_group, + default_retry=self._method_configs["DeleteEntryGroup"].retry, + default_timeout=self._method_configs["DeleteEntryGroup"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.DeleteEntryGroupRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + self._inner_api_calls["delete_entry_group"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def create_entry( + self, + parent, + entry_id, + entry, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. Creates an entry. Currently only entries of 'FILESET' + type can be created. The user should enable the Data Catalog API in the + project identified by the ``parent`` parameter (see [Data Catalog + Resource Project] (/data-catalog/docs/concepts/resource-project) for + more information). 
+ + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> parent = client.entry_group_path('[PROJECT]', '[LOCATION]', '[ENTRY_GROUP]') + >>> + >>> # TODO: Initialize `entry_id`: + >>> entry_id = '' + >>> + >>> # TODO: Initialize `entry`: + >>> entry = {} + >>> + >>> response = client.create_entry(parent, entry_id, entry) + + Args: + parent (str): Required. The name of the entry group this entry is in. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id} + + Note that this Entry and its child resources may not actually be stored + in the location in this name. + entry_id (str): Required. The id of the entry to create. + entry (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Entry]): Required. The entry to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.datacatalog_v1beta1.types.Entry` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.datacatalog_v1beta1.types.Entry` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "create_entry" not in self._inner_api_calls: + self._inner_api_calls[ + "create_entry" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_entry, + default_retry=self._method_configs["CreateEntry"].retry, + default_timeout=self._method_configs["CreateEntry"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.CreateEntryRequest( + parent=parent, entry_id=entry_id, entry=entry + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["create_entry"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def update_entry( self, entry, @@ -399,7 +739,10 @@ def update_entry( metadata=None, ): """ - Updates an existing entry. + Updates an existing entry. The user should enable the Data Catalog API + in the project identified by the ``entry.name`` parameter (see [Data + Catalog Resource Project] (/data-catalog/docs/concepts/resource-project) + for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -412,22 +755,26 @@ def update_entry( >>> response = client.update_entry(entry) Args: - entry (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Entry]): Required. The updated Entry. + entry (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Entry]): Required. The updated entry. The "name" field must be set. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.Entry` - update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): Optional. The fields to update on the entry. If absent or empty, all - modifiable fields are updated. 
+ update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The fields to update on the entry. If absent or empty, all modifiable + fields are updated. - Modifiable fields in synced entries: + The following fields are modifiable: - 1. schema (Pub/Sub topics only) + - For entries with type ``DATA_STREAM``: - Modifiable fields in native entries: + - ``schema`` - 1. display\_name - 2. description - 3. schema + - For entries with type ``FILESET`` + + - ``schema`` + - ``display_name`` + - ``description`` + - ``gcs_fileset_spec`` + - ``gcs_fileset_spec.file_patterns`` If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` @@ -481,6 +828,78 @@ def update_entry( request, retry=retry, timeout=timeout, metadata=metadata ) + def delete_entry( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Alpha feature. Deletes an existing entry. Only entries created through + ``CreateEntry`` method can be deleted. The user should enable the Data + Catalog API in the project identified by the ``name`` parameter (see + [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + + Example: + >>> from google.cloud import datacatalog_v1beta1 + >>> + >>> client = datacatalog_v1beta1.DataCatalogClient() + >>> + >>> name = client.entry_path('[PROJECT]', '[LOCATION]', '[ENTRY_GROUP]', '[ENTRY]') + >>> + >>> client.delete_entry(name) + + Args: + name (str): Required. The name of the entry. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id} + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "delete_entry" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_entry" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_entry, + default_retry=self._method_configs["DeleteEntry"].retry, + default_timeout=self._method_configs["DeleteEntry"].timeout, + client_info=self._client_info, + ) + + request = datacatalog_pb2.DeleteEntryRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + self._inner_api_calls["delete_entry"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def get_entry( self, name, @@ -501,8 +920,14 @@ def get_entry( >>> response = client.get_entry(name) Args: - name (str): Required. The name of the entry. For example, - "projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id}". + name (str): Required. The name of the entry. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id} + + Entry groups are logical groupings of entries. Currently, users cannot + create/modify entry groups. 
They are created by Data Catalog; they + include ``@bigquery`` for all BigQuery entries, and ``@pubsub`` for all + Cloud Pub/Sub entries. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -574,26 +999,24 @@ def lookup_entry( Args: linked_resource (str): The full name of the Google Cloud Platform resource the Data Catalog entry represents. See: - https://cloud.google.com/apis/design/resource\_names#full\_resource\_name + https://cloud.google.com/apis/design/resource\_names#full\_resource\_name. Full names are case-sensitive. Examples: - "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". - "//pubsub.googleapis.com/projects/projectId/topics/topicId" + + - //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + - //pubsub.googleapis.com/projects/projectId/topics/topicId sql_resource (str): The SQL name of the entry. SQL names are case-sensitive. Examples: - .. raw:: html + - ``cloud_pubsub.project_id.topic_id`` + - ``pubsub.project_id.`topic.id.with.dots``` + - ``bigquery.project_id.dataset_id.table_id`` + - ``datacatalog.project_id.location_id.entry_group_id.entry_id`` -
    -
  • cloud_pubsub.project_id.topic_id
  • -
  • pubsub.project_id.`topic.id.with.dots`
  • -
  • bigquery.project_id.dataset_id.table_id
  • -
  • datacatalog.project_id.location_id.entry_group_id.entry_id
  • -
- *_ids shoud satisfy the standard SQL rules for identifiers. - https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical + ``*_id``\ s should satisfy the standard SQL rules for identifiers. + https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -647,7 +1070,11 @@ def create_tag_template( metadata=None, ): """ - Creates a tag template. + Creates a tag template. The user should enable the Data Catalog API in + the project identified by the ``parent`` parameter (see `Data Catalog + Resource + Project `__ + for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -666,7 +1093,10 @@ def create_tag_template( Args: parent (str): Required. The name of the project and the location this template is in. - Example: "projects/{project\_id}/locations/{location}". Note that this + Example: + + - projects/{project\_id}/locations/{location} + TagTemplate and its child resources may not actually be stored in the location in this name. tag_template_id (str): Required. The id of the tag template to create. @@ -744,8 +1174,9 @@ def get_tag_template( >>> response = client.get_tag_template(name) Args: - name (str): Required. The name of the tag template. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}". + name (str): Required. The name of the tag template. Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id} retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -803,9 +1234,13 @@ def update_tag_template( metadata=None, ): """ - Updates a tag template. This method cannot be used to update the fields of - a template. 
The tag template fields are represented as separate resources - and should be updated using their own create/update/delete methods. + Updates a tag template. This method cannot be used to update the fields + of a template. The tag template fields are represented as separate + resources and should be updated using their own create/update/delete + methods. The user should enable the Data Catalog API in the project + identified by the ``tag_template.name`` parameter (see [Data Catalog + Resource Project] (/data-catalog/docs/concepts/resource-project) for + more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -818,19 +1253,17 @@ def update_tag_template( >>> response = client.update_tag_template(tag_template) Args: - tag_template (Union[dict, ~google.cloud.datacatalog_v1beta1.types.TagTemplate]): Required. The template to update. + tag_template (Union[dict, ~google.cloud.datacatalog_v1beta1.types.TagTemplate]): Required. The template to update. The "name" field must be set. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.TagTemplate` - update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): Optional. The field mask specifies the parts of the template to - overwrite. + update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The field mask specifies the parts of the template to overwrite. Allowed fields: - - display\_name + - ``display_name`` - If update\_mask is omitted, all of the allowed fields above will be - updated. + If absent or empty, all of the allowed fields above will be updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` @@ -893,7 +1326,10 @@ def delete_tag_template( metadata=None, ): """ - Deletes a tag template and all tags using the template. + Deletes a tag template and all tags using the template. 
The user should + enable the Data Catalog API in the project identified by the ``name`` + parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -908,11 +1344,12 @@ def delete_tag_template( >>> client.delete_tag_template(name, force) Args: - name (str): Required. The name of the tag template to delete. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}". - force (bool): Required. Currently, this field must always be set to true. - This confirms the deletion of any possible tags using this template. - force = false will be supported in the future. + name (str): Required. The name of the tag template to delete. Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id} + force (bool): Required. Currently, this field must always be set to ``true``. This + confirms the deletion of any possible tags using this template. + ``force = false`` will be supported in the future. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -968,7 +1405,11 @@ def create_tag_template_field( metadata=None, ): """ - Creates a field in a tag template. + Creates a field in a tag template. The user should enable the Data + Catalog API in the project identified by the ``parent`` parameter (see + `Data Catalog Resource + Project `__ + for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -987,14 +1428,16 @@ def create_tag_template_field( Args: parent (str): Required. The name of the project this template is in. Example: - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}". 
+ + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id} + Note that this TagTemplateField may not actually be stored in the location in this name. - tag_template_field_id (str): Required. The id of the tag template field to create. Field ids can + tag_template_field_id (str): Required. The ID of the tag template field to create. Field ids can contain letters (both uppercase and lowercase), numbers (0-9), - underscores (\_) and dashes (-). Field ids must be at least 1 character - long and at most 128 characters long. Field ids must also be unique to - their template. + underscores (\_) and dashes (-). Field IDs must be at least 1 character + long and at most 128 characters long. Field IDs must also be unique + within their template. tag_template_field (Union[dict, ~google.cloud.datacatalog_v1beta1.types.TagTemplateField]): Required. The tag template field to create. If a dict is provided, it must be of the same form as the protobuf @@ -1061,8 +1504,11 @@ def update_tag_template_field( metadata=None, ): """ - Updates a field in a tag template. This method cannot be used to update the - field type. + Updates a field in a tag template. This method cannot be used to update + the field type. The user should enable the Data Catalog API in the + project identified by the ``name`` parameter (see [Data Catalog Resource + Project] (/data-catalog/docs/concepts/resource-project) for more + information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1077,20 +1523,21 @@ def update_tag_template_field( >>> response = client.update_tag_template_field(name, tag_template_field) Args: - name (str): Required. The name of the tag template field. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id}". + name (str): Required. The name of the tag template field. 
Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id} tag_template_field (Union[dict, ~google.cloud.datacatalog_v1beta1.types.TagTemplateField]): Required. The template to update. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.TagTemplateField` - update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): Optional. The field mask specifies the parts of the template to - overwrite. Allowed fields: + update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The field mask specifies the parts of the template to be updated. + Allowed fields: - - display\_name - - type.enum\_type + - ``display_name`` + - ``type.enum_type`` - If update\_mask is omitted, all of the allowed fields above will be - updated. + If ``update_mask`` is not set or empty, all of the allowed fields above + will be updated. When updating an enum type, the provided values will be merged with the existing values. Therefore, enum values can only be added, existing enum @@ -1157,7 +1604,11 @@ def rename_tag_template_field( metadata=None, ): """ - Renames a field in a tag template. + Renames a field in a tag template. The user should enable the Data + Catalog API in the project identified by the ``name`` parameter (see + `Data Catalog Resource + Project `__ + for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1172,10 +1623,11 @@ def rename_tag_template_field( >>> response = client.rename_tag_template_field(name, new_tag_template_field_id) Args: - name (str): Required. The name of the tag template. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id}". + name (str): Required. The name of the tag template. 
Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id} new_tag_template_field_id (str): Required. The new ID of this tag template field. For example, - "my\_new\_field". + ``my_new_field``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -1235,7 +1687,10 @@ def delete_tag_template_field( metadata=None, ): """ - Deletes a field in a tag template and all uses of that field. + Deletes a field in a tag template and all uses of that field. The user + should enable the Data Catalog API in the project identified by the + ``name`` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1250,11 +1705,12 @@ def delete_tag_template_field( >>> client.delete_tag_template_field(name, force) Args: - name (str): Required. The name of the tag template field to delete. For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id}". - force (bool): Required. Currently, this field must always be set to true. - This confirms the deletion of this field from any tags using this field. - force = false will be supported in the future. + name (str): Required. The name of the tag template field to delete. Example: + + - projects/{project\_id}/locations/{location}/tagTemplates/{tag\_template\_id}/fields/{tag\_template\_field\_id} + force (bool): Required. Currently, this field must always be set to ``true``. This + confirms the deletion of this field from any tags using this field. + ``force = false`` will be supported in the future. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -1309,7 +1765,12 @@ def create_tag( metadata=None, ): """ - Creates a tag on an ``Entry``. + Creates a tag on an ``Entry``. Note: The project identified by the + ``parent`` parameter for the + `tag `__ + and the `tag + template `__ + used to create the tag must be from the same organization. Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1325,8 +1786,10 @@ def create_tag( Args: parent (str): Required. The name of the resource to attach this tag to. Tags can be - attached to Entries. (example: - "projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id}"). + attached to Entries. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id} + Note that this Tag and its child resources may not actually be stored in the location in this name. tag (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Tag]): Required. The tag to create. @@ -1403,13 +1866,13 @@ def update_tag( >>> response = client.update_tag(tag) Args: - tag (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Tag]): Required. The updated tag. + tag (Union[dict, ~google.cloud.datacatalog_v1beta1.types.Tag]): Required. The updated tag. The "name" field must be set. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.Tag` - update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): Optional. The fields to update on the Tag. If absent or empty, all - modifiable fields are updated. Currently the only modifiable field is - the field ``fields``. + update_mask (Union[dict, ~google.cloud.datacatalog_v1beta1.types.FieldMask]): The fields to update on the Tag. If absent or empty, all modifiable + fields are updated. Currently the only modifiable field is the field + ``fields``. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` @@ -1481,8 +1944,9 @@ def delete_tag( >>> client.delete_tag(name) Args: - name (str): Required. The name of the tag to delete. For example, - "projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id}/tags/{tag\_id}". + name (str): Required. The name of the tag to delete. Example: + + - projects/{project\_id}/locations/{location}/entryGroups/{entry\_group\_id}/entries/{entry\_id}/tags/{tag\_id} retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -1642,13 +2106,19 @@ def set_iam_policy( Sets the access control policy for a resource. Replaces any existing policy. Supported resources are: - - Tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on tag - templates. + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on tag + templates. + - ``datacatalog.entries.setIamPolicy`` to set policies on entries. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on entry + groups. Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1735,13 +2205,19 @@ def get_iam_policy( Supported resources are: - - Tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. 
Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on tag - templates. + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on tag + templates. + - ``datacatalog.entries.getIamPolicy`` to get policies on entries. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on entry + groups. Example: >>> from google.cloud import datacatalog_v1beta1 @@ -1823,10 +2299,12 @@ def test_iam_permissions( exist, an empty set of permissions is returned (We don't return a ``NOT_FOUND`` error). - Supported resource are: + Supported resources are: - - tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. 
A caller is not required to have Google IAM permission to make this diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py index a1bda46164f0..065f9970864c 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/data_catalog_client_config.py @@ -22,11 +22,36 @@ "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, + "CreateEntryGroup": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetEntryGroup": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteEntryGroup": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "CreateEntry": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, "UpdateEntry": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, + "DeleteEntry": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, "GetEntry": { "timeout_millis": 60000, "retry_codes_name": "idempotent", diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py index 954b282e5021..de76fc9f8a3b 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/enums.py @@ -21,19 +21,24 @@ class EntryType(enum.IntEnum): """ - Entry resources in Data Catalog can be of different types e.g. BigQuery - Table entry is of type 'TABLE'. This enum describes all the possible types - Data Catalog contains. + Entry resources in Data Catalog can be of different types e.g. 
a + BigQuery Table entry is of type ``TABLE``. This enum describes all the + possible types Data Catalog contains. Attributes: ENTRY_TYPE_UNSPECIFIED (int): Default unknown type - TABLE (int): The type of entry that has a GoogleSQL schema, including logical views. - DATA_STREAM (int): An entry type which is used for streaming entries. Example - Pub/Sub. + TABLE (int): Output only. The type of entry that has a GoogleSQL schema, including + logical views. + DATA_STREAM (int): Output only. An entry type which is used for streaming entries. Example: + Cloud Pub/Sub topic. + FILESET (int): Alpha feature. An entry type which is a set of files or objects. Example: + Cloud Storage fileset. """ ENTRY_TYPE_UNSPECIFIED = 0 TABLE = 2 DATA_STREAM = 3 + FILESET = 4 class SearchResultType(enum.IntEnum): diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py index 5f1e9639e7a6..2cbeb340f133 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/gapic/transports/data_catalog_grpc_transport.py @@ -117,7 +117,7 @@ def search_catalog(self): This is a custom method (https://cloud.google.com/apis/design/custom\_methods) and does not return the complete resource, only the resource identifier and high - level fields. Clients can subsequentally call Get methods. + level fields. Clients can subsequentally call ``Get`` methods. Note that searches do not have full recall. There may be results that match your query but are not returned, even in subsequent pages of @@ -135,11 +135,78 @@ def search_catalog(self): """ return self._stubs["data_catalog_stub"].SearchCatalog + @property + def create_entry_group(self): + """Return the gRPC stub for :meth:`DataCatalogClient.create_entry_group`. + + Alpha feature. Creates an EntryGroup. 
The user should enable the Data + Catalog API in the project identified by the ``parent`` parameter (see + [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_catalog_stub"].CreateEntryGroup + + @property + def get_entry_group(self): + """Return the gRPC stub for :meth:`DataCatalogClient.get_entry_group`. + + Alpha feature. + Gets an EntryGroup. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_catalog_stub"].GetEntryGroup + + @property + def delete_entry_group(self): + """Return the gRPC stub for :meth:`DataCatalogClient.delete_entry_group`. + + Alpha feature. Deletes an EntryGroup. Only entry groups that do not + contain entries can be deleted. The user should enable the Data Catalog + API in the project identified by the ``name`` parameter (see [Data + Catalog Resource Project] (/data-catalog/docs/concepts/resource-project) + for more information). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_catalog_stub"].DeleteEntryGroup + + @property + def create_entry(self): + """Return the gRPC stub for :meth:`DataCatalogClient.create_entry`. + + Alpha feature. Creates an entry. Currently only entries of 'FILESET' + type can be created. The user should enable the Data Catalog API in the + project identified by the ``parent`` parameter (see [Data Catalog + Resource Project] (/data-catalog/docs/concepts/resource-project) for + more information). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["data_catalog_stub"].CreateEntry + @property def update_entry(self): """Return the gRPC stub for :meth:`DataCatalogClient.update_entry`. - Updates an existing entry. + Updates an existing entry. The user should enable the Data Catalog API + in the project identified by the ``entry.name`` parameter (see [Data + Catalog Resource Project] (/data-catalog/docs/concepts/resource-project) + for more information). Returns: Callable: A callable which accepts the appropriate @@ -148,6 +215,23 @@ def update_entry(self): """ return self._stubs["data_catalog_stub"].UpdateEntry + @property + def delete_entry(self): + """Return the gRPC stub for :meth:`DataCatalogClient.delete_entry`. + + Alpha feature. Deletes an existing entry. Only entries created through + ``CreateEntry`` method can be deleted. The user should enable the Data + Catalog API in the project identified by the ``name`` parameter (see + [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["data_catalog_stub"].DeleteEntry + @property def get_entry(self): """Return the gRPC stub for :meth:`DataCatalogClient.get_entry`. @@ -180,7 +264,11 @@ def lookup_entry(self): def create_tag_template(self): """Return the gRPC stub for :meth:`DataCatalogClient.create_tag_template`. - Creates a tag template. + Creates a tag template. The user should enable the Data Catalog API in + the project identified by the ``parent`` parameter (see `Data Catalog + Resource + Project `__ + for more information). Returns: Callable: A callable which accepts the appropriate @@ -206,9 +294,13 @@ def get_tag_template(self): def update_tag_template(self): """Return the gRPC stub for :meth:`DataCatalogClient.update_tag_template`. - Updates a tag template. 
This method cannot be used to update the fields of - a template. The tag template fields are represented as separate resources - and should be updated using their own create/update/delete methods. + Updates a tag template. This method cannot be used to update the fields + of a template. The tag template fields are represented as separate + resources and should be updated using their own create/update/delete + methods. The user should enable the Data Catalog API in the project + identified by the ``tag_template.name`` parameter (see [Data Catalog + Resource Project] (/data-catalog/docs/concepts/resource-project) for + more information). Returns: Callable: A callable which accepts the appropriate @@ -221,7 +313,10 @@ def update_tag_template(self): def delete_tag_template(self): """Return the gRPC stub for :meth:`DataCatalogClient.delete_tag_template`. - Deletes a tag template and all tags using the template. + Deletes a tag template and all tags using the template. The user should + enable the Data Catalog API in the project identified by the ``name`` + parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). Returns: Callable: A callable which accepts the appropriate @@ -234,7 +329,11 @@ def delete_tag_template(self): def create_tag_template_field(self): """Return the gRPC stub for :meth:`DataCatalogClient.create_tag_template_field`. - Creates a field in a tag template. + Creates a field in a tag template. The user should enable the Data + Catalog API in the project identified by the ``parent`` parameter (see + `Data Catalog Resource + Project `__ + for more information). Returns: Callable: A callable which accepts the appropriate @@ -247,8 +346,11 @@ def create_tag_template_field(self): def update_tag_template_field(self): """Return the gRPC stub for :meth:`DataCatalogClient.update_tag_template_field`. - Updates a field in a tag template. This method cannot be used to update the - field type. 
+ Updates a field in a tag template. This method cannot be used to update + the field type. The user should enable the Data Catalog API in the + project identified by the ``name`` parameter (see [Data Catalog Resource + Project] (/data-catalog/docs/concepts/resource-project) for more + information). Returns: Callable: A callable which accepts the appropriate @@ -261,7 +363,11 @@ def update_tag_template_field(self): def rename_tag_template_field(self): """Return the gRPC stub for :meth:`DataCatalogClient.rename_tag_template_field`. - Renames a field in a tag template. + Renames a field in a tag template. The user should enable the Data + Catalog API in the project identified by the ``name`` parameter (see + `Data Catalog Resource + Project `__ + for more information). Returns: Callable: A callable which accepts the appropriate @@ -274,7 +380,10 @@ def rename_tag_template_field(self): def delete_tag_template_field(self): """Return the gRPC stub for :meth:`DataCatalogClient.delete_tag_template_field`. - Deletes a field in a tag template and all uses of that field. + Deletes a field in a tag template and all uses of that field. The user + should enable the Data Catalog API in the project identified by the + ``name`` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). Returns: Callable: A callable which accepts the appropriate @@ -287,7 +396,12 @@ def delete_tag_template_field(self): def create_tag(self): """Return the gRPC stub for :meth:`DataCatalogClient.create_tag`. - Creates a tag on an ``Entry``. + Creates a tag on an ``Entry``. Note: The project identified by the + ``parent`` parameter for the + `tag `__ + and the `tag + template `__ + used to create the tag must be from the same organization. Returns: Callable: A callable which accepts the appropriate @@ -342,13 +456,19 @@ def set_iam_policy(self): Sets the access control policy for a resource. Replaces any existing policy. 
Supported resources are: - - Tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on tag - templates. + + - ``datacatalog.tagTemplates.setIamPolicy`` to set policies on tag + templates. + - ``datacatalog.entries.setIamPolicy`` to set policies on entries. + - ``datacatalog.entryGroups.setIamPolicy`` to set policies on entry + groups. Returns: Callable: A callable which accepts the appropriate @@ -367,13 +487,19 @@ def get_iam_policy(self): Supported resources are: - - Tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on tag - templates. + + - ``datacatalog.tagTemplates.getIamPolicy`` to get policies on tag + templates. + - ``datacatalog.entries.getIamPolicy`` to get policies on entries. + - ``datacatalog.entryGroups.getIamPolicy`` to get policies on entry + groups. Returns: Callable: A callable which accepts the appropriate @@ -390,10 +516,12 @@ def test_iam_permissions(self): exist, an empty set of permissions is returned (We don't return a ``NOT_FOUND`` error). - Supported resource are: + Supported resources are: - - tag templates. Note, this method cannot be used to manage policies - for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform + - Tag templates. 
+ - Entries. + - Entry groups. Note, this method cannot be used to manage policies for + BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. A caller is not required to have Google IAM permission to make this diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog.proto index e89e7ad62671..8b67be1a0d29 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog.proto @@ -18,6 +18,10 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/datacatalog/v1beta1/gcs_fileset_spec.proto"; import "google/cloud/datacatalog/v1beta1/schema.proto"; import "google/cloud/datacatalog/v1beta1/search.proto"; import "google/cloud/datacatalog/v1beta1/table_spec.proto"; @@ -27,7 +31,6 @@ import "google/iam/v1/iam_policy.proto"; import "google/iam/v1/policy.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; -import "google/api/client.proto"; option cc_enable_arenas = true; option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog"; @@ -38,7 +41,8 @@ option java_package = "com.google.cloud.datacatalog"; // their data. service DataCatalog { option (google.api.default_host) = "datacatalog.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform"; // Searches Data Catalog for multiple resources like entries, tags that // match a query. 
@@ -46,7 +50,7 @@ service DataCatalog { // This is a custom method // (https://cloud.google.com/apis/design/custom_methods) and does not return // the complete resource, only the resource identifier and high level - // fields. Clients can subsequentally call Get methods. + // fields. Clients can subsequentally call `Get` methods. // // Note that searches do not have full recall. There may be results that match // your query but are not returned, even in subsequent pages of results. These @@ -60,14 +64,83 @@ service DataCatalog { post: "/v1beta1/catalog:search" body: "*" }; + option (google.api.method_signature) = "scope,query"; + } + + // Alpha feature. + // Creates an EntryGroup. + // The user should enable the Data Catalog API in the project identified by + // the `parent` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc CreateEntryGroup(CreateEntryGroupRequest) returns (EntryGroup) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*/locations/*}/entryGroups" + body: "entry_group" + }; + option (google.api.method_signature) = "parent,entry_group_id,entry_group"; + } + + // Alpha feature. + // Gets an EntryGroup. + rpc GetEntryGroup(GetEntryGroupRequest) returns (EntryGroup) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*/locations/*/entryGroups/*}" + }; + option (google.api.method_signature) = "name"; + option (google.api.method_signature) = "name,read_mask"; + } + + // Alpha feature. + // Deletes an EntryGroup. Only entry groups that do not contain entries can be + // deleted. The user should enable the Data Catalog API in the project + // identified by the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). 
+ rpc DeleteEntryGroup(DeleteEntryGroupRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1beta1/{name=projects/*/locations/*/entryGroups/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Alpha feature. + // Creates an entry. Currently only entries of 'FILESET' type can be created. + // The user should enable the Data Catalog API in the project identified by + // the `parent` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc CreateEntry(CreateEntryRequest) returns (Entry) { + option (google.api.http) = { + post: "/v1beta1/{parent=projects/*/locations/*/entryGroups/*}/entries" + body: "entry" + }; + option (google.api.method_signature) = "parent,entry_id,entry"; } // Updates an existing entry. + // The user should enable the Data Catalog API in the project identified by + // the `entry.name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). rpc UpdateEntry(UpdateEntryRequest) returns (Entry) { option (google.api.http) = { patch: "/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}" body: "entry" }; + option (google.api.method_signature) = "entry"; + option (google.api.method_signature) = "entry,update_mask"; + } + + // Alpha feature. + // Deletes an existing entry. Only entries created through + // [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] + // method can be deleted. + // The user should enable the Data Catalog API in the project identified by + // the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). 
+ rpc DeleteEntry(DeleteEntryRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}" + }; + option (google.api.method_signature) = "name"; } // Gets an entry. @@ -75,6 +148,7 @@ service DataCatalog { option (google.api.http) = { get: "/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}" }; + option (google.api.method_signature) = "name"; } // Get an entry by target resource name. This method allows clients to use @@ -86,12 +160,17 @@ service DataCatalog { }; } - // Creates a tag template. + // Creates a tag template. The user should enable the Data Catalog API in + // the project identified by the `parent` parameter (see [Data Catalog + // Resource Project](/data-catalog/docs/concepts/resource-project) for more + // information). rpc CreateTagTemplate(CreateTagTemplateRequest) returns (TagTemplate) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*/locations/*}/tagTemplates" body: "tag_template" }; + option (google.api.method_signature) = + "parent,tag_template_id,tag_template"; } // Gets a tag template. @@ -99,63 +178,104 @@ service DataCatalog { option (google.api.http) = { get: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*}" }; + option (google.api.method_signature) = "name"; } // Updates a tag template. This method cannot be used to update the fields of // a template. The tag template fields are represented as separate resources // and should be updated using their own create/update/delete methods. + // The user should enable the Data Catalog API in the project identified by + // the `tag_template.name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). 
rpc UpdateTagTemplate(UpdateTagTemplateRequest) returns (TagTemplate) { option (google.api.http) = { patch: "/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}" body: "tag_template" }; + option (google.api.method_signature) = "tag_template"; + option (google.api.method_signature) = "tag_template,update_mask"; } // Deletes a tag template and all tags using the template. - rpc DeleteTagTemplate(DeleteTagTemplateRequest) returns (google.protobuf.Empty) { + // The user should enable the Data Catalog API in the project identified by + // the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc DeleteTagTemplate(DeleteTagTemplateRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*}" }; + option (google.api.method_signature) = "name,force"; } - // Creates a field in a tag template. - rpc CreateTagTemplateField(CreateTagTemplateFieldRequest) returns (TagTemplateField) { + // Creates a field in a tag template. The user should enable the Data Catalog + // API in the project identified by the `parent` parameter (see + // [Data Catalog Resource + // Project](/data-catalog/docs/concepts/resource-project) for more + // information). + rpc CreateTagTemplateField(CreateTagTemplateFieldRequest) + returns (TagTemplateField) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields" body: "tag_template_field" }; + option (google.api.method_signature) = + "parent,tag_template_field_id,tag_template_field"; } // Updates a field in a tag template. This method cannot be used to update the - // field type. - rpc UpdateTagTemplateField(UpdateTagTemplateFieldRequest) returns (TagTemplateField) { + // field type. 
The user should enable the Data Catalog API in the project + // identified by the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc UpdateTagTemplateField(UpdateTagTemplateFieldRequest) + returns (TagTemplateField) { option (google.api.http) = { patch: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}" body: "tag_template_field" }; + option (google.api.method_signature) = "name,tag_template_field"; + option (google.api.method_signature) = + "name,tag_template_field,update_mask"; } - // Renames a field in a tag template. - rpc RenameTagTemplateField(RenameTagTemplateFieldRequest) returns (TagTemplateField) { + // Renames a field in a tag template. The user should enable the Data Catalog + // API in the project identified by the `name` parameter (see [Data Catalog + // Resource Project](/data-catalog/docs/concepts/resource-project) for more + // information). + rpc RenameTagTemplateField(RenameTagTemplateFieldRequest) + returns (TagTemplateField) { option (google.api.http) = { post: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename" body: "*" }; + option (google.api.method_signature) = "name,new_tag_template_field_id"; } // Deletes a field in a tag template and all uses of that field. - rpc DeleteTagTemplateField(DeleteTagTemplateFieldRequest) returns (google.protobuf.Empty) { + // The user should enable the Data Catalog API in the project identified by + // the `name` parameter (see [Data Catalog Resource Project] + // (/data-catalog/docs/concepts/resource-project) for more information). + rpc DeleteTagTemplateField(DeleteTagTemplateFieldRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}" }; + option (google.api.method_signature) = "name,force"; } // Creates a tag on an [Entry][google.cloud.datacatalog.v1beta1.Entry]. 
+ // Note: The project identified by the `parent` parameter for the + // [tag](/data-catalog/docs/reference/rest/v1beta1/projects.locations.entryGroups.entries.tags/create#path-parameters) + // and the + // [tag + // template](/data-catalog/docs/reference/rest/v1beta1/projects.locations.tagTemplates/create#path-parameters) + // used to create the tag must be from the same organization. rpc CreateTag(CreateTagRequest) returns (Tag) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags" body: "tag" }; + option (google.api.method_signature) = "parent,tag"; } // Updates an existing tag. @@ -164,6 +284,8 @@ service DataCatalog { patch: "/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}" body: "tag" }; + option (google.api.method_signature) = "tag"; + option (google.api.method_signature) = "tag,update_mask"; } // Deletes a tag. @@ -171,6 +293,7 @@ service DataCatalog { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}" }; + option (google.api.method_signature) = "name"; } // Lists the tags on an [Entry][google.cloud.datacatalog.v1beta1.Entry]. @@ -178,22 +301,37 @@ service DataCatalog { option (google.api.http) = { get: "/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags" }; + option (google.api.method_signature) = "parent"; } // Sets the access control policy for a resource. Replaces any existing // policy. // Supported resources are: // - Tag templates. + // - Entries. + // - Entry groups. // Note, this method cannot be used to manage policies for BigQuery, Cloud // Pub/Sub and any external Google Cloud Platform resources synced to Cloud // Data Catalog. // // Callers must have following Google IAM permission - // `datacatalog.tagTemplates.setIamPolicy` to set policies on tag templates. 
- rpc SetIamPolicy(google.iam.v1.SetIamPolicyRequest) returns (google.iam.v1.Policy) { + // - `datacatalog.tagTemplates.setIamPolicy` to set policies on tag + // templates. + // - `datacatalog.entries.setIamPolicy` to set policies on entries. + // - `datacatalog.entryGroups.setIamPolicy` to set policies on entry groups. + rpc SetIamPolicy(google.iam.v1.SetIamPolicyRequest) + returns (google.iam.v1.Policy) { option (google.api.http) = { post: "/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy" body: "*" + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:setIamPolicy" + body: "*" + } + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:setIamPolicy" + body: "*" + } }; } @@ -203,16 +341,30 @@ service DataCatalog { // // Supported resources are: // - Tag templates. + // - Entries. + // - Entry groups. // Note, this method cannot be used to manage policies for BigQuery, Cloud // Pub/Sub and any external Google Cloud Platform resources synced to Cloud // Data Catalog. // // Callers must have following Google IAM permission - // `datacatalog.tagTemplates.getIamPolicy` to get policies on tag templates. - rpc GetIamPolicy(google.iam.v1.GetIamPolicyRequest) returns (google.iam.v1.Policy) { + // - `datacatalog.tagTemplates.getIamPolicy` to get policies on tag + // templates. + // - `datacatalog.entries.getIamPolicy` to get policies on entries. + // - `datacatalog.entryGroups.getIamPolicy` to get policies on entry groups. 
+ rpc GetIamPolicy(google.iam.v1.GetIamPolicyRequest) + returns (google.iam.v1.Policy) { option (google.api.http) = { post: "/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy" body: "*" + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:getIamPolicy" + body: "*" + } + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:getIamPolicy" + body: "*" + } }; } @@ -220,18 +372,29 @@ service DataCatalog { // If the resource does not exist, an empty set of permissions is returned // (We don't return a `NOT_FOUND` error). // - // Supported resource are: - // - tag templates. + // Supported resources are: + // - Tag templates. + // - Entries. + // - Entry groups. // Note, this method cannot be used to manage policies for BigQuery, Cloud // Pub/Sub and any external Google Cloud Platform resources synced to Cloud // Data Catalog. // // A caller is not required to have Google IAM permission to make this // request. - rpc TestIamPermissions(google.iam.v1.TestIamPermissionsRequest) returns (google.iam.v1.TestIamPermissionsResponse) { + rpc TestIamPermissions(google.iam.v1.TestIamPermissionsRequest) + returns (google.iam.v1.TestIamPermissionsResponse) { option (google.api.http) = { post: "/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions" body: "*" + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:testIamPermissions" + body: "*" + } + additional_bindings { + post: "/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:testIamPermissions" + body: "*" + } }; } } @@ -242,21 +405,21 @@ message SearchCatalogRequest { message Scope { // Data Catalog tries to automatically choose the right corpus of data to // search through. You can ensure an organization is included by adding it - // to "include_org_ids". You can ensure a project's org is included with - // "include_project_ids". 
You must specify at least one organization - // using "include_org_ids" or "include_project_ids" in all search requests. + // to `include_org_ids`. You can ensure a project's org is included with + // `include_project_ids`. You must specify at least one organization + // using `include_org_ids` or `include_project_ids` in all search requests. // // List of organization IDs to search within. To find your organization ID, // follow instructions in - // https://cloud.google.com/resource-manager/docs/creating-managing-organization + // https://cloud.google.com/resource-manager/docs/creating-managing-organization. repeated string include_org_ids = 2; // List of project IDs to search within. To learn more about the // distinction between project names/IDs/numbers, go to - // https://cloud.google.com/docs/overview/#projects + // https://cloud.google.com/docs/overview/#projects. repeated string include_project_ids = 3; - // If true, include Google Cloud Platform (GCP) public datasets in the + // If `true`, include Google Cloud Platform (GCP) public datasets in the // search results. Info on GCP public datasets is available at // https://cloud.google.com/public-datasets/. By default, GCP public // datasets are excluded. @@ -264,7 +427,7 @@ message SearchCatalogRequest { } // Required. The scope of this search request. - Scope scope = 6; + Scope scope = 6 [(google.api.field_behavior) = REQUIRED]; // Required. The query string in search query syntax. The query must be // non-empty. @@ -278,36 +441,37 @@ message SearchCatalogRequest { // Note: Query tokens need to have a minimum of 3 characters for substring // matching to work correctly. See [Data Catalog Search // Syntax](/data-catalog/docs/how-to/search-reference) for more information. - string query = 1; + string query = 1 [(google.api.field_behavior) = REQUIRED]; // Number of results in the search page. If <=0 then defaults to 10. Max limit // for page_size is 1000. Throws an invalid argument for page_size > 1000. 
int32 page_size = 2; - // Optional pagination token returned in an earlier - // [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalogResponse.next_page_token]; - // indicates that this is a continuation of a prior - // [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] - // call, and that the system should return the next page of data. If empty - // then the first page is returned. - string page_token = 3; + // Optional. Pagination token returned in an earlier + // [SearchCatalogResponse.next_page_token][google.cloud.datacatalog.v1beta1.SearchCatalogResponse.next_page_token], + // which indicates that this is a continuation of a prior + // [SearchCatalogRequest][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] + // call, and that the system should return the next page of data. If empty, + // the first page is returned. + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; // Specifies the ordering of results, currently supported case-sensitive // choices are: - //
    - //
  • relevance
  • - //
  • last_access_timestamp [asc|desc], defaults to descending if not - // specified,
  • - //
  • last_modified_timestamp [asc|desc], defaults to descending if not - // specified.
  • - //
+ // + // * `relevance`, only supports desecending + // * `last_access_timestamp [asc|desc]`, defaults to descending if not + // specified + // * `last_modified_timestamp [asc|desc]`, defaults to descending if not + // specified + // + // If not specified, defaults to `relevance` descending. string order_by = 5; } // Response message for // [SearchCatalog][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog]. message SearchCatalogResponse { - // Search results in descending order of relevance. + // Search results. repeated SearchCatalogResult results = 1; // The token that can be used to retrieve the next page of results. @@ -315,32 +479,132 @@ message SearchCatalogResponse { } // Request message for -// [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. -message UpdateEntryRequest { - // Required. The updated Entry. - Entry entry = 1; - - // Optional. The fields to update on the entry. If absent or empty, all - // modifiable fields are updated. +// [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. +message CreateEntryGroupRequest { + // Required. The name of the project this entry group is in. Example: // - // Modifiable fields in synced entries: + // * projects/{project_id}/locations/{location} // - // 1. schema (Pub/Sub topics only) + // Note that this EntryGroup and its child resources may not actually be + // stored in the location in this name. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/EntryGroup" + } + ]; + + // Required. The id of the entry group to create. + string entry_group_id = 3 [(google.api.field_behavior) = REQUIRED]; + + // The entry group to create. Defaults to an empty entry group. + EntryGroup entry_group = 2; +} + +// Request message for +// [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. +message GetEntryGroupRequest { + // Required. 
The name of the entry group. For example, + // `projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + } + ]; + + // The fields to return. If not set or empty, all fields are returned. + google.protobuf.FieldMask read_mask = 2; +} + +// Request message for +// [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. +message DeleteEntryGroupRequest { + // Required. The name of the entry group. For example, + // `projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + } + ]; +} + +// Request message for +// [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. +message CreateEntryRequest { + // Required. The name of the entry group this entry is in. Example: // - // Modifiable fields in native entries: + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} // - // 1. display_name - // 2. description - // 3. schema + // Note that this Entry and its child resources may not actually be stored in + // the location in this name. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + } + ]; + + // Required. The id of the entry to create. + string entry_id = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The entry to create. + Entry entry = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for +// [UpdateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry]. +message UpdateEntryRequest { + // Required. The updated entry. The "name" field must be set. 
+ Entry entry = 1 [(google.api.field_behavior) = REQUIRED]; + + // The fields to update on the entry. If absent or empty, all modifiable + // fields are updated. + // + // The following fields are modifiable: + // * For entries with type `DATA_STREAM`: + // * `schema` + // * For entries with type `FILESET` + // * `schema` + // * `display_name` + // * `description` + // * `gcs_fileset_spec` + // * `gcs_fileset_spec.file_patterns` google.protobuf.FieldMask update_mask = 2; } +// Request message for +// [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. +message DeleteEntryRequest { + // Required. The name of the entry. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Entry" + } + ]; +} + // Request message for // [GetEntry][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry]. message GetEntryRequest { - // Required. The name of the entry. For example, - // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}". - string name = 1; + // Required. The name of the entry. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + // + // Entry groups are logical groupings of entries. Currently, users cannot + // create/modify entry groups. They are created by Data Catalog; they include + // `@bigquery` for all BigQuery entries, and `@pubsub` for all Cloud Pub/Sub + // entries. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Entry" + } + ]; } // Request message for @@ -351,215 +615,327 @@ message LookupEntryRequest { oneof target_name { // The full name of the Google Cloud Platform resource the Data Catalog // entry represents. 
See: - // https://cloud.google.com/apis/design/resource_names#full_resource_name + // https://cloud.google.com/apis/design/resource_names#full_resource_name. // Full names are case-sensitive. // // Examples: - // "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". - // "//pubsub.googleapis.com/projects/projectId/topics/topicId" + // + // * //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + // * //pubsub.googleapis.com/projects/projectId/topics/topicId string linked_resource = 1; // The SQL name of the entry. SQL names are case-sensitive. // // Examples: - //
    - //
  • cloud_pubsub.project_id.topic_id
  • - //
  • pubsub.project_id.`topic.id.with.dots`
  • - //
  • bigquery.project_id.dataset_id.table_id
  • - //
  • datacatalog.project_id.location_id.entry_group_id.entry_id
  • - //
- // *_ids shoud satisfy the standard SQL rules for identifiers. - // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical + // + // * `cloud_pubsub.project_id.topic_id` + // * ``pubsub.project_id.`topic.id.with.dots` `` + // * `bigquery.project_id.dataset_id.table_id` + // * `datacatalog.project_id.location_id.entry_group_id.entry_id` + // + // `*_id`s shoud satisfy the standard SQL rules for identifiers. + // https://cloud.google.com/bigquery/docs/reference/standard-sql/lexical. string sql_resource = 3; } } // Entry Metadata. // A Data Catalog Entry resource represents another resource in Google -// Cloud Platform, such as a BigQuery Dataset or a Pub/Sub Topic. Clients can -// use the `linked_resource` field in the Entry resource to refer to the -// original resource id of the source system. +// Cloud Platform, such as a BigQuery dataset or a Cloud Pub/Sub topic. +// Clients can use the `linked_resource` field in the Entry resource to refer to +// the original resource ID of the source system. // // An Entry resource contains resource details, such as its schema. An Entry can // also be used to attach flexible metadata, such as a // [Tag][google.cloud.datacatalog.v1beta1.Tag]. message Entry { - // Required when used in - // [UpdateEntryRequest][google.cloud.datacatalog.v1beta1.UpdateEntryRequest]. - // The Data Catalog resource name of the entry in URL format. For example, - // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}". + option (google.api.resource) = { + type: "datacatalog.googleapis.com/Entry" + pattern: "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" + }; + + // The Data Catalog resource name of the entry in URL format. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + // // Note that this Entry and its child resources may not actually be stored in // the location in this name. 
- string name = 1; + string name = 1 [(google.api.resource_reference) = { + type: "datacatalog.googleapis.com/EntryGroup" + }]; - // Output only. The full name of the cloud resource the entry belongs to. See: - // https://cloud.google.com/apis/design/resource_names#full_resource_name + // Output only. The resource this metadata entry refers to. // - // Data Catalog supports resources from select Google Cloud Platform systems. - // `linked_resource` is the full name of the Google Cloud Platform resource. + // For Google Cloud Platform resources, `linked_resource` is the [full name of + // the + // resource](https://cloud.google.com/apis/design/resource_names#full_resource_name). // For example, the `linked_resource` for a table resource from BigQuery is: // - // "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". - string linked_resource = 9; + // * //bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId + string linked_resource = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Required. Type of entry. - EntryType type = 2; + // Required. Entry type. + oneof entry_type { + // The type of the entry. + EntryType type = 2; + } - // Optional. Type specification information. + // Type specification information. oneof type_spec { + // Specification that applies to a Cloud Storage fileset. This is only valid + // on entries of type FILESET. + GcsFilesetSpec gcs_fileset_spec = 6; + // Specification that applies to a BigQuery table. This is only valid on - // entries of type TABLE. + // entries of type `TABLE`. BigQueryTableSpec bigquery_table_spec = 12; // Specification for a group of BigQuery tables with name pattern - // [prefix]YYYYMMDD. Context: - // https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding + // `[prefix]YYYYMMDD`. Context: + // https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding. 
BigQueryDateShardedSpec bigquery_date_sharded_spec = 15; } - // Optional. Display information such as title and description. A short name - // to identify the entry, for example, "Analytics Data - Jan 2011". Default - // value is an empty string. + // Display information such as title and description. A short name to identify + // the entry, for example, "Analytics Data - Jan 2011". Default value is an + // empty string. string display_name = 3; - // Optional. Entry description, which can consist of several sentences or - // paragraphs that describe entry contents. Default value is an empty string. + // Entry description, which can consist of several sentences or paragraphs + // that describe entry contents. Default value is an empty string. string description = 4; - // Optional. Schema of the entry. An entry might not have any schema attached - // to it. + // Schema of the entry. An entry might not have any schema attached to it. Schema schema = 5; - // Output only. Timestamps about the underlying Google Cloud Platform resource - // -- not about this Data Catalog Entry. - SystemTimestamps source_system_timestamps = 7; + // Output only. Timestamps about the underlying Google Cloud Platform + // resource, not about this Data Catalog Entry. + SystemTimestamps source_system_timestamps = 7 + [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// EntryGroup Metadata. +// An EntryGroup resource represents a logical grouping of zero or more +// Data Catalog [Entry][google.cloud.datacatalog.v1beta1.Entry] resources. +message EntryGroup { + option (google.api.resource) = { + type: "datacatalog.googleapis.com/EntryGroup" + pattern: "projects/{project}/locations/{location}/entryGroups/{entry_group}" + }; + + // The resource name of the entry group in URL format. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id} + // + // Note that this EntryGroup and its child resources may not actually be + // stored in the location in this name. 
+ string name = 1; + + // A short name to identify the entry group, for example, + // "analytics data - jan 2011". Default value is an empty string. + string display_name = 2; + + // Entry group description, which can consist of several sentences or + // paragraphs that describe entry group contents. Default value is an empty + // string. + string description = 3; + + // Output only. Timestamps about this EntryGroup. Default value is empty + // timestamps. + SystemTimestamps data_catalog_timestamps = 4 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // Request message for // [CreateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate]. message CreateTagTemplateRequest { // Required. The name of the project and the location this template is in. - // Example: "projects/{project_id}/locations/{location}". Note that this + // Example: + // + // * projects/{project_id}/locations/{location} + // // TagTemplate and its child resources may not actually be stored in the // location in this name. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/TagTemplate" + } + ]; // Required. The id of the tag template to create. - string tag_template_id = 3; + string tag_template_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The tag template to create. - TagTemplate tag_template = 2; + TagTemplate tag_template = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for // [GetTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate]. message GetTagTemplateRequest { - // Required. The name of the tag template. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}". - string name = 1; + // Required. The name of the tag template. 
Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplate" + } + ]; +} + +// Entry resources in Data Catalog can be of different types e.g. a BigQuery +// Table entry is of type `TABLE`. This enum describes all the possible types +// Data Catalog contains. +enum EntryType { + // Default unknown type + ENTRY_TYPE_UNSPECIFIED = 0; + + // Output only. The type of entry that has a GoogleSQL schema, including + // logical views. + TABLE = 2; + + // Output only. An entry type which is used for streaming entries. Example: + // Cloud Pub/Sub topic. + DATA_STREAM = 3; + + // Alpha feature. An entry type which is a set of files or objects. Example: + // Cloud Storage fileset. + FILESET = 4; } // Request message for // [UpdateTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate]. message UpdateTagTemplateRequest { - // Required. The template to update. - TagTemplate tag_template = 1; + // Required. The template to update. The "name" field must be set. + TagTemplate tag_template = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. The field mask specifies the parts of the template to overwrite. + // The field mask specifies the parts of the template to overwrite. // // Allowed fields: // - // * display_name + // * `display_name` // - // If update_mask is omitted, all of the allowed fields above will be updated. + // If absent or empty, all of the allowed fields above will be updated. google.protobuf.FieldMask update_mask = 2; } // Request message for // [DeleteTagTemplate][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate]. message DeleteTagTemplateRequest { - // Required. The name of the tag template to delete. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}". - string name = 1; - - // Required. 
Currently, this field must always be set to true. + // Required. The name of the tag template to delete. Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplate" + } + ]; + + // Required. Currently, this field must always be set to `true`. // This confirms the deletion of any possible tags using this template. - // force = false will be supported in the future. - bool force = 2; + // `force = false` will be supported in the future. + bool force = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for // [CreateTag][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag]. message CreateTagRequest { - // Required. - // The name of the resource to attach this tag to. Tags can be attached to - // Entries. (example: - // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}"). + // Required. The name of the resource to attach this tag to. Tags can be + // attached to Entries. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id} + // // Note that this Tag and its child resources may not actually be stored in // the location in this name. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "datacatalog.googleapis.com/Tag" } + ]; // Required. The tag to create. - Tag tag = 2; + Tag tag = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for // [UpdateTag][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag]. message UpdateTagRequest { - // Required. The updated tag. - Tag tag = 1; + // Required. The updated tag. The "name" field must be set. + Tag tag = 1 [(google.api.field_behavior) = REQUIRED]; - // Optional. The fields to update on the Tag. 
If absent or empty, all - // modifiable fields are updated. Currently the only modifiable field is the - // field `fields`. + // The fields to update on the Tag. If absent or empty, all modifiable fields + // are updated. Currently the only modifiable field is the field `fields`. google.protobuf.FieldMask update_mask = 2; } // Request message for // [DeleteTag][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag]. message DeleteTagRequest { - // Required. The name of the tag to delete. For example, - // "projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id}". - string name = 1; + // Required. The name of the tag to delete. Example: + // + // * projects/{project_id}/locations/{location}/entryGroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/Tag" + } + ]; } // Request message for // [CreateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField]. message CreateTagTemplateFieldRequest { // Required. The name of the project this template is in. Example: - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}". + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + // // Note that this TagTemplateField may not actually be stored in the location // in this name. - string parent = 1; - - // Required. The id of the tag template field to create. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplate" + } + ]; + + // Required. The ID of the tag template field to create. // Field ids can contain letters (both uppercase and lowercase), numbers - // (0-9), underscores (_) and dashes (-). Field ids must be at least 1 - // character long and at most 128 characters long. 
Field ids must also be - // unique to their template. - string tag_template_field_id = 2; + // (0-9), underscores (_) and dashes (-). Field IDs must be at least 1 + // character long and at most 128 characters long. Field IDs must also be + // unique within their template. + string tag_template_field_id = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The tag template field to create. - TagTemplateField tag_template_field = 3; + TagTemplateField tag_template_field = 3 + [(google.api.field_behavior) = REQUIRED]; } // Request message for // [UpdateTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField]. message UpdateTagTemplateFieldRequest { - // Required. The name of the tag template field. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}". - string name = 1; + // Required. The name of the tag template field. Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplateField" + } + ]; // Required. The template to update. - TagTemplateField tag_template_field = 2; + TagTemplateField tag_template_field = 2 + [(google.api.field_behavior) = REQUIRED]; - // Optional. The field mask specifies the parts of the template to overwrite. + // The field mask specifies the parts of the template to be updated. // Allowed fields: // - // * display_name - // * type.enum_type + // * `display_name` + // * `type.enum_type` // - // If update_mask is omitted, all of the allowed fields above will be updated. + // If `update_mask` is not set or empty, all of the allowed fields above will + // be updated. // // When updating an enum type, the provided values will be merged with the // existing values. 
Therefore, enum values can only be added, existing enum @@ -570,40 +946,38 @@ message UpdateTagTemplateFieldRequest { // Request message for // [RenameTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField]. message RenameTagTemplateFieldRequest { - // Required. The name of the tag template. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}". - string name = 1; + // Required. The name of the tag template. Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplateField" + } + ]; // Required. The new ID of this tag template field. For example, - // "my_new_field". - string new_tag_template_field_id = 2; -} - -// Entry resources in Data Catalog can be of different types e.g. BigQuery -// Table entry is of type 'TABLE'. This enum describes all the possible types -// Data Catalog contains. -enum EntryType { - // Default unknown type - ENTRY_TYPE_UNSPECIFIED = 0; - - // The type of entry that has a GoogleSQL schema, including logical views. - TABLE = 2; - - // An entry type which is used for streaming entries. Example - Pub/Sub. - DATA_STREAM = 3; + // `my_new_field`. + string new_tag_template_field_id = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for // [DeleteTagTemplateField][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField]. message DeleteTagTemplateFieldRequest { - // Required. The name of the tag template field to delete. For example, - // "projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id}". - string name = 1; - - // Required. Currently, this field must always be set to true. + // Required. The name of the tag template field to delete. 
Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}/fields/{tag_template_field_id} + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/TagTemplateField" + } + ]; + + // Required. Currently, this field must always be set to `true`. // This confirms the deletion of this field from any tags using this field. - // force = false will be supported in the future. - bool force = 2; + // `force = false` will be supported in the future. + bool force = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for @@ -611,14 +985,18 @@ message DeleteTagTemplateFieldRequest { message ListTagsRequest { // Required. The name of the Data Catalog resource to list the tags of. The // resource could be an [Entry][google.cloud.datacatalog.v1beta1.Entry]. - string parent = 1; - - // Optional. The maximum number of tags to return. Default is 10. Max limit is - // 1000. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "datacatalog.googleapis.com/Tag" + } + ]; + + // The maximum number of tags to return. Default is 10. Max limit is 1000. int32 page_size = 2; - // Optional. Token that specifies which page is requested. If empty, the first - // page is returned. + // Token that specifies which page is requested. If empty, the first page is + // returned. 
string page_token = 3; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py index 10641f51e1ab..01f2e79352d9 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2.py @@ -17,6 +17,12 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.cloud.datacatalog_v1beta1.proto import ( + gcs_fileset_spec_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_gcs__fileset__spec__pb2, +) from google.cloud.datacatalog_v1beta1.proto import ( schema_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_schema__pb2, ) @@ -36,7 +42,6 @@ from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -47,10 +52,14 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n8google/cloud/datacatalog_v1beta1/proto/datacatalog.proto\x12 
google.cloud.datacatalog.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/schema.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/search.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x1a\x31google/cloud/datacatalog_v1beta1/proto/tags.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"\x8f\x02\n\x14SearchCatalogRequest\x12K\n\x05scope\x18\x06 \x01(\x0b\x32<.google.cloud.datacatalog.v1beta1.SearchCatalogRequest.Scope\x12\r\n\x05query\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x10\n\x08order_by\x18\x05 \x01(\t\x1a\x62\n\x05Scope\x12\x17\n\x0finclude_org_ids\x18\x02 \x03(\t\x12\x1b\n\x13include_project_ids\x18\x03 \x03(\t\x12#\n\x1binclude_gcp_public_datasets\x18\x07 \x01(\x08"x\n\x15SearchCatalogResponse\x12\x46\n\x07results\x18\x01 \x03(\x0b\x32\x35.google.cloud.datacatalog.v1beta1.SearchCatalogResult\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t"}\n\x12UpdateEntryRequest\x12\x36\n\x05\x65ntry\x18\x01 \x01(\x0b\x32\'.google.cloud.datacatalog.v1beta1.Entry\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x1f\n\x0fGetEntryRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"V\n\x12LookupEntryRequest\x12\x19\n\x0flinked_resource\x18\x01 \x01(\tH\x00\x12\x16\n\x0csql_resource\x18\x03 \x01(\tH\x00\x42\r\n\x0btarget_name"\xe6\x03\n\x05\x45ntry\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0flinked_resource\x18\t \x01(\t\x12\x39\n\x04type\x18\x02 \x01(\x0e\x32+.google.cloud.datacatalog.v1beta1.EntryType\x12R\n\x13\x62igquery_table_spec\x18\x0c \x01(\x0b\x32\x33.google.cloud.datacatalog.v1beta1.BigQueryTableSpecH\x00\x12_\n\x1a\x62igquery_date_sharded_spec\x18\x0f 
\x01(\x0b\x32\x39.google.cloud.datacatalog.v1beta1.BigQueryDateShardedSpecH\x00\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x38\n\x06schema\x18\x05 \x01(\x0b\x32(.google.cloud.datacatalog.v1beta1.Schema\x12T\n\x18source_system_timestamps\x18\x07 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x0b\n\ttype_spec"\x88\x01\n\x18\x43reateTagTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x17\n\x0ftag_template_id\x18\x03 \x01(\t\x12\x43\n\x0ctag_template\x18\x02 \x01(\x0b\x32-.google.cloud.datacatalog.v1beta1.TagTemplate"%\n\x15GetTagTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x90\x01\n\x18UpdateTagTemplateRequest\x12\x43\n\x0ctag_template\x18\x01 \x01(\x0b\x32-.google.cloud.datacatalog.v1beta1.TagTemplate\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"7\n\x18\x44\x65leteTagTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x66orce\x18\x02 \x01(\x08"V\n\x10\x43reateTagRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\x03tag\x18\x02 \x01(\x0b\x32%.google.cloud.datacatalog.v1beta1.Tag"w\n\x10UpdateTagRequest\x12\x32\n\x03tag\x18\x01 \x01(\x0b\x32%.google.cloud.datacatalog.v1beta1.Tag\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask" \n\x10\x44\x65leteTagRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x9e\x01\n\x1d\x43reateTagTemplateFieldRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x1d\n\x15tag_template_field_id\x18\x02 \x01(\t\x12N\n\x12tag_template_field\x18\x03 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"\xae\x01\n\x1dUpdateTagTemplateFieldRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x12tag_template_field\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateField\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"P\n\x1dRenameTagTemplateFieldRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12!\n\x19new_tag_template_field_id\x18\x02 
\x01(\t"<\n\x1d\x44\x65leteTagTemplateFieldRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05\x66orce\x18\x02 \x01(\x08"H\n\x0fListTagsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"`\n\x10ListTagsResponse\x12\x33\n\x04tags\x18\x01 \x03(\x0b\x32%.google.cloud.datacatalog.v1beta1.Tag\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t*C\n\tEntryType\x12\x1a\n\x16\x45NTRY_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05TABLE\x10\x02\x12\x0f\n\x0b\x44\x41TA_STREAM\x10\x03\x32\xf8\x1c\n\x0b\x44\x61taCatalog\x12\xa4\x01\n\rSearchCatalog\x12\x36.google.cloud.datacatalog.v1beta1.SearchCatalogRequest\x1a\x37.google.cloud.datacatalog.v1beta1.SearchCatalogResponse""\x82\xd3\xe4\x93\x02\x1c"\x17/v1beta1/catalog:search:\x01*\x12\xc1\x01\n\x0bUpdateEntry\x12\x34.google.cloud.datacatalog.v1beta1.UpdateEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"S\x82\xd3\xe4\x93\x02M2D/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}:\x05\x65ntry\x12\xae\x01\n\x08GetEntry\x12\x31.google.cloud.datacatalog.v1beta1.GetEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"F\x82\xd3\xe4\x93\x02@\x12>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\x12\x8d\x01\n\x0bLookupEntry\x12\x34.google.cloud.datacatalog.v1beta1.LookupEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/v1beta1/entries:lookup\x12\xcb\x01\n\x11\x43reateTagTemplate\x12:.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"K\x82\xd3\xe4\x93\x02\x45"5/v1beta1/{parent=projects/*/locations/*}/tagTemplates:\x0ctag_template\x12\xb7\x01\n\x0eGetTagTemplate\x12\x37.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"=\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\x12\xd8\x01\n\x11UpdateTagTemplate\x12:.google.cloud.datacatalog.v1beta1.UpdateTagTemp
lateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"X\x82\xd3\xe4\x93\x02R2B/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}:\x0ctag_template\x12\xa6\x01\n\x11\x44\x65leteTagTemplate\x12:.google.cloud.datacatalog.v1beta1.DeleteTagTemplateRequest\x1a\x16.google.protobuf.Empty"=\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\x12\xe9\x01\n\x16\x43reateTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.CreateTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"Z\x82\xd3\xe4\x93\x02T">/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields:\x12tag_template_field\x12\xe9\x01\n\x16UpdateTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.UpdateTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"Z\x82\xd3\xe4\x93\x02T2>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:\x12tag_template_field\x12\xdf\x01\n\x16RenameTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.RenameTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"P\x82\xd3\xe4\x93\x02J"E/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename:\x01*\x12\xb9\x01\n\x16\x44\x65leteTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.DeleteTagTemplateFieldRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02@*>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}\x12\xba\x01\n\tCreateTag\x12\x32.google.cloud.datacatalog.v1beta1.CreateTagRequest\x1a%.google.cloud.datacatalog.v1beta1.Tag"R\x82\xd3\xe4\x93\x02L"E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags:\x03tag\x12\xbe\x01\n\tUpdateTag\x12\x32.google.cloud.datacatalog.v1beta1.UpdateTagRequest\x1a%.google.cloud.datacatalog.v1beta1.Tag"V\x82\xd3\xe4\x93\x02P2I/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}:\x03tag\x12\xa6\x01\n\tDeleteTag\x12\x32.google.cloud.datacatalog.v1beta1.DeleteTagRequest\x1a\x16.google.protobu
f.Empty"M\x82\xd3\xe4\x93\x02G*E/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}\x12\xc0\x01\n\x08ListTags\x12\x31.google.cloud.datacatalog.v1beta1.ListTagsRequest\x1a\x32.google.cloud.datacatalog.v1beta1.ListTagsResponse"M\x82\xd3\xe4\x93\x02G\x12\x45/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags\x12\x9c\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02K"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy:\x01*\x12\x9c\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02K"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy:\x01*\x12\xc2\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"W\x82\xd3\xe4\x93\x02Q"L/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions:\x01*\x1aN\xca\x41\x1a\x64\x61tacatalog.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n8google/cloud/datacatalog_v1beta1/proto/datacatalog.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a=google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/schema.proto\x1a\x33google/cloud/datacatalog_v1beta1/proto/search.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x1a\x31google/cloud/datacatalog_v1beta1/proto/tags.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto"\x9e\x02\n\x14SearchCatalogRequest\x12P\n\x05scope\x18\x06 \x01(\x0b\x32<.google.cloud.datacatalog.v1beta1.SearchCatalogRequest.ScopeB\x03\xe0\x41\x02\x12\x12\n\x05query\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x10\n\x08order_by\x18\x05 \x01(\t\x1a\x62\n\x05Scope\x12\x17\n\x0finclude_org_ids\x18\x02 \x03(\t\x12\x1b\n\x13include_project_ids\x18\x03 \x03(\t\x12#\n\x1binclude_gcp_public_datasets\x18\x07 \x01(\x08"x\n\x15SearchCatalogResponse\x12\x46\n\x07results\x18\x01 \x03(\x0b\x32\x35.google.cloud.datacatalog.v1beta1.SearchCatalogResult\x12\x17\n\x0fnext_page_token\x18\x03 \x01(\t"\xb8\x01\n\x17\x43reateEntryGroupRequest\x12=\n\x06parent\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\x12%datacatalog.googleapis.com/EntryGroup\x12\x1b\n\x0e\x65ntry_group_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x0b\x65ntry_group\x18\x02 \x01(\x0b\x32,.google.cloud.datacatalog.v1beta1.EntryGroup"\x82\x01\n\x14GetEntryGroupRequest\x12;\n\x04name\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\n%datacatalog.googleapis.com/EntryGroup\x12-\n\tread_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"V\n\x17\x44\x65leteEntryGroupRequest\x12;\n\x04name\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\n%datacatalog.googleapis.com/EntryGroup"\xa7\x01\n\x12\x43reateEntryRequest\x12=\n\x06parent\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\n%datacatalog.googleapis.com/EntryGroup\x12\x15\n\x08\x65ntry_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12;\n\x05\x65ntry\x18\x02 \x01(\x0b\x32\'.google.cloud.datacatalog.v1beta1.EntryB\x03\xe0\x41\x02"\x82\x01\n\x12UpdateEntryRequest\x12;\n\x05\x65ntry\x18\x01 \x01(\x0b\x32\'.google.cloud.datacatalog.v1beta1.EntryB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"L\n\x12\x44\x65leteEntryRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n 
datacatalog.googleapis.com/Entry"I\n\x0fGetEntryRequest\x12\x36\n\x04name\x18\x01 \x01(\tB(\xe0\x41\x02\xfa\x41"\n datacatalog.googleapis.com/Entry"V\n\x12LookupEntryRequest\x12\x19\n\x0flinked_resource\x18\x01 \x01(\tH\x00\x12\x16\n\x0csql_resource\x18\x03 \x01(\tH\x00\x42\r\n\x0btarget_name"\xf4\x05\n\x05\x45ntry\x12\x38\n\x04name\x18\x01 \x01(\tB*\xfa\x41\'\n%datacatalog.googleapis.com/EntryGroup\x12\x1c\n\x0flinked_resource\x18\t \x01(\tB\x03\xe0\x41\x03\x12;\n\x04type\x18\x02 \x01(\x0e\x32+.google.cloud.datacatalog.v1beta1.EntryTypeH\x00\x12L\n\x10gcs_fileset_spec\x18\x06 \x01(\x0b\x32\x30.google.cloud.datacatalog.v1beta1.GcsFilesetSpecH\x01\x12R\n\x13\x62igquery_table_spec\x18\x0c \x01(\x0b\x32\x33.google.cloud.datacatalog.v1beta1.BigQueryTableSpecH\x01\x12_\n\x1a\x62igquery_date_sharded_spec\x18\x0f \x01(\x0b\x32\x39.google.cloud.datacatalog.v1beta1.BigQueryDateShardedSpecH\x01\x12\x14\n\x0c\x64isplay_name\x18\x03 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x38\n\x06schema\x18\x05 \x01(\x0b\x32(.google.cloud.datacatalog.v1beta1.Schema\x12Y\n\x18source_system_timestamps\x18\x07 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x03\xe0\x41\x03:x\xea\x41u\n datacatalog.googleapis.com/Entry\x12Qprojects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}B\x0c\n\nentry_typeB\x0b\n\ttype_spec"\x8e\x02\n\nEntryGroup\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12X\n\x17\x64\x61ta_catalog_timestamps\x18\x04 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x03\xe0\x41\x03:m\xea\x41j\n%datacatalog.googleapis.com/EntryGroup\x12\x41projects/{project}/locations/{location}/entryGroups/{entry_group}"\xc2\x01\n\x18\x43reateTagTemplateRequest\x12>\n\x06parent\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\x12&datacatalog.googleapis.com/TagTemplate\x12\x1c\n\x0ftag_template_id\x18\x03 
\x01(\tB\x03\xe0\x41\x02\x12H\n\x0ctag_template\x18\x02 \x01(\x0b\x32-.google.cloud.datacatalog.v1beta1.TagTemplateB\x03\xe0\x41\x02"U\n\x15GetTagTemplateRequest\x12<\n\x04name\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\n&datacatalog.googleapis.com/TagTemplate"\x95\x01\n\x18UpdateTagTemplateRequest\x12H\n\x0ctag_template\x18\x01 \x01(\x0b\x32-.google.cloud.datacatalog.v1beta1.TagTemplateB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"l\n\x18\x44\x65leteTagTemplateRequest\x12<\n\x04name\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\n&datacatalog.googleapis.com/TagTemplate\x12\x12\n\x05\x66orce\x18\x02 \x01(\x08\x42\x03\xe0\x41\x02"\x83\x01\n\x10\x43reateTagRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x64\x61tacatalog.googleapis.com/Tag\x12\x37\n\x03tag\x18\x02 \x01(\x0b\x32%.google.cloud.datacatalog.v1beta1.TagB\x03\xe0\x41\x02"|\n\x10UpdateTagRequest\x12\x37\n\x03tag\x18\x01 \x01(\x0b\x32%.google.cloud.datacatalog.v1beta1.TagB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"H\n\x10\x44\x65leteTagRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x64\x61tacatalog.googleapis.com/Tag"\xd8\x01\n\x1d\x43reateTagTemplateFieldRequest\x12>\n\x06parent\x18\x01 \x01(\tB.\xe0\x41\x02\xfa\x41(\n&datacatalog.googleapis.com/TagTemplate\x12"\n\x15tag_template_field_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12S\n\x12tag_template_field\x18\x03 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateFieldB\x03\xe0\x41\x02"\xe8\x01\n\x1dUpdateTagTemplateFieldRequest\x12\x41\n\x04name\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+datacatalog.googleapis.com/TagTemplateField\x12S\n\x12tag_template_field\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateFieldB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\x8a\x01\n\x1dRenameTagTemplateFieldRequest\x12\x41\n\x04name\x18\x01 
\x01(\tB3\xe0\x41\x02\xfa\x41-\n+datacatalog.googleapis.com/TagTemplateField\x12&\n\x19new_tag_template_field_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"v\n\x1d\x44\x65leteTagTemplateFieldRequest\x12\x41\n\x04name\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+datacatalog.googleapis.com/TagTemplateField\x12\x12\n\x05\x66orce\x18\x02 \x01(\x08\x42\x03\xe0\x41\x02"p\n\x0fListTagsRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x64\x61tacatalog.googleapis.com/Tag\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"`\n\x10ListTagsResponse\x12\x33\n\x04tags\x18\x01 \x03(\x0b\x32%.google.cloud.datacatalog.v1beta1.Tag\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t*P\n\tEntryType\x12\x1a\n\x16\x45NTRY_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05TABLE\x10\x02\x12\x0f\n\x0b\x44\x41TA_STREAM\x10\x03\x12\x0b\n\x07\x46ILESET\x10\x04\x32\xc8+\n\x0b\x44\x61taCatalog\x12\xb2\x01\n\rSearchCatalog\x12\x36.google.cloud.datacatalog.v1beta1.SearchCatalogRequest\x1a\x37.google.cloud.datacatalog.v1beta1.SearchCatalogResponse"0\x82\xd3\xe4\x93\x02\x1c"\x17/v1beta1/catalog:search:\x01*\xda\x41\x0bscope,query\x12\xea\x01\n\x10\x43reateEntryGroup\x12\x39.google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest\x1a,.google.cloud.datacatalog.v1beta1.EntryGroup"m\x82\xd3\xe4\x93\x02\x43"4/v1beta1/{parent=projects/*/locations/*}/entryGroups:\x0b\x65ntry_group\xda\x41!parent,entry_group_id,entry_group\x12\xcb\x01\n\rGetEntryGroup\x12\x36.google.cloud.datacatalog.v1beta1.GetEntryGroupRequest\x1a,.google.cloud.datacatalog.v1beta1.EntryGroup"T\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta1/{name=projects/*/locations/*/entryGroups/*}\xda\x41\x04name\xda\x41\x0ename,read_mask\x12\xaa\x01\n\x10\x44\x65leteEntryGroup\x12\x39.google.cloud.datacatalog.v1beta1.DeleteEntryGroupRequest\x1a\x16.google.protobuf.Empty"C\x82\xd3\xe4\x93\x02\x36*4/v1beta1/{name=projects/*/locations/*/entryGroups/*}\xda\x41\x04name\x12\xd3\x01\n\x0b\x43reateEntry\x12\x34.google.cloud.datacatalog.v1beta1.CreateE
ntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"e\x82\xd3\xe4\x93\x02G">/v1beta1/{parent=projects/*/locations/*/entryGroups/*}/entries:\x05\x65ntry\xda\x41\x15parent,entry_id,entry\x12\xdd\x01\n\x0bUpdateEntry\x12\x34.google.cloud.datacatalog.v1beta1.UpdateEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"o\x82\xd3\xe4\x93\x02M2D/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}:\x05\x65ntry\xda\x41\x05\x65ntry\xda\x41\x11\x65ntry,update_mask\x12\xaa\x01\n\x0b\x44\x65leteEntry\x12\x34.google.cloud.datacatalog.v1beta1.DeleteEntryRequest\x1a\x16.google.protobuf.Empty"M\x82\xd3\xe4\x93\x02@*>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\xda\x41\x04name\x12\xb5\x01\n\x08GetEntry\x12\x31.google.cloud.datacatalog.v1beta1.GetEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"M\x82\xd3\xe4\x93\x02@\x12>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\xda\x41\x04name\x12\x8d\x01\n\x0bLookupEntry\x12\x34.google.cloud.datacatalog.v1beta1.LookupEntryRequest\x1a\'.google.cloud.datacatalog.v1beta1.Entry"\x1f\x82\xd3\xe4\x93\x02\x19\x12\x17/v1beta1/entries:lookup\x12\xf1\x01\n\x11\x43reateTagTemplate\x12:.google.cloud.datacatalog.v1beta1.CreateTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"q\x82\xd3\xe4\x93\x02\x45"5/v1beta1/{parent=projects/*/locations/*}/tagTemplates:\x0ctag_template\xda\x41#parent,tag_template_id,tag_template\x12\xbe\x01\n\x0eGetTagTemplate\x12\x37.google.cloud.datacatalog.v1beta1.GetTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"D\x82\xd3\xe4\x93\x02\x37\x12\x35/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\xda\x41\x04name\x12\x83\x02\n\x11UpdateTagTemplate\x12:.google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest\x1a-.google.cloud.datacatalog.v1beta1.TagTemplate"\x82\x01\x82\xd3\xe4\x93\x02R2B/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}:\x0ctag_template\xda\x41\x0ctag_template\xda\x41\x18tag_template,update_mas
k\x12\xb3\x01\n\x11\x44\x65leteTagTemplate\x12:.google.cloud.datacatalog.v1beta1.DeleteTagTemplateRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x37*5/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\xda\x41\nname,force\x12\x9c\x02\n\x16\x43reateTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.CreateTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"\x8c\x01\x82\xd3\xe4\x93\x02T">/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields:\x12tag_template_field\xda\x41/parent,tag_template_field_id,tag_template_field\x12\xaa\x02\n\x16UpdateTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.UpdateTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"\x9a\x01\x82\xd3\xe4\x93\x02T2>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:\x12tag_template_field\xda\x41\x17name,tag_template_field\xda\x41#name,tag_template_field,update_mask\x12\x80\x02\n\x16RenameTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.RenameTagTemplateFieldRequest\x1a\x32.google.cloud.datacatalog.v1beta1.TagTemplateField"q\x82\xd3\xe4\x93\x02J"E/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename:\x01*\xda\x41\x1ename,new_tag_template_field_id\x12\xc6\x01\n\x16\x44\x65leteTagTemplateField\x12?.google.cloud.datacatalog.v1beta1.DeleteTagTemplateFieldRequest\x1a\x16.google.protobuf.Empty"S\x82\xd3\xe4\x93\x02@*>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}\xda\x41\nname,force\x12\xc7\x01\n\tCreateTag\x12\x32.google.cloud.datacatalog.v1beta1.CreateTagRequest\x1a%.google.cloud.datacatalog.v1beta1.Tag"_\x82\xd3\xe4\x93\x02L"E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags:\x03tag\xda\x41\nparent,tag\x12\xd6\x01\n\tUpdateTag\x12\x32.google.cloud.datacatalog.v1beta1.UpdateTagRequest\x1a%.google.cloud.datacatalog.v1beta1.Tag"n\x82\xd3\xe4\x93\x02P2I/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}:\x03tag\xda\x41\x03tag\xda\x41
\x0ftag,update_mask\x12\xad\x01\n\tDeleteTag\x12\x32.google.cloud.datacatalog.v1beta1.DeleteTagRequest\x1a\x16.google.protobuf.Empty"T\x82\xd3\xe4\x93\x02G*E/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}\xda\x41\x04name\x12\xc9\x01\n\x08ListTags\x12\x31.google.cloud.datacatalog.v1beta1.ListTagsRequest\x1a\x32.google.cloud.datacatalog.v1beta1.ListTagsResponse"V\x82\xd3\xe4\x93\x02G\x12\x45/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags\xda\x41\x06parent\x12\xc0\x02\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"\xf4\x01\x82\xd3\xe4\x93\x02\xed\x01"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy:\x01*ZJ"E/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:setIamPolicy:\x01*ZT"O/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:setIamPolicy:\x01*\x12\xc0\x02\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"\xf4\x01\x82\xd3\xe4\x93\x02\xed\x01"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy:\x01*ZJ"E/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:getIamPolicy:\x01*ZT"O/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:getIamPolicy:\x01*\x12\xf2\x02\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"\x86\x02\x82\xd3\xe4\x93\x02\xff\x01"L/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions:\x01*ZP"K/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:testIamPermissions:\x01*ZZ"U/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:testIamPermissions:\x01*\x1aN\xca\x41\x1a\x64\x61tacatalog.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), dependencies=[ 
google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_gcs__fileset__spec__pb2.DESCRIPTOR, google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_schema__pb2.DESCRIPTOR, google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_search__pb2.DESCRIPTOR, google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_table__spec__pb2.DESCRIPTOR, @@ -60,7 +69,6 @@ google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -83,11 +91,14 @@ _descriptor.EnumValueDescriptor( name="DATA_STREAM", index=2, number=3, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="FILESET", index=3, number=4, serialized_options=None, type=None + ), ], containing_type=None, serialized_options=None, - serialized_start=2955, - serialized_end=3022, + serialized_start=4868, + serialized_end=4948, ) _sym_db.RegisterEnumDescriptor(_ENTRYTYPE) @@ -95,6 +106,7 @@ ENTRY_TYPE_UNSPECIFIED = 0 TABLE = 2 DATA_STREAM = 3 +FILESET = 4 _SEARCHCATALOGREQUEST_SCOPE = _descriptor.Descriptor( @@ -167,8 +179,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=717, - serialized_end=815, + serialized_start=855, + serialized_end=953, ) _SEARCHCATALOGREQUEST = _descriptor.Descriptor( @@ -193,7 +205,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -211,7 +223,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -247,7 +259,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -277,8 +289,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=544, - serialized_end=815, + serialized_start=667, + serialized_end=953, ) @@ -334,8 +346,262 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=817, - serialized_end=937, + serialized_start=955, + serialized_end=1075, +) + + +_CREATEENTRYGROUPREQUEST = _descriptor.Descriptor( + name="CreateEntryGroupRequest", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A'\022%datacatalog.googleapis.com/EntryGroup" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry_group_id", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest.entry_group_id", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry_group", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest.entry_group", + index=2, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1078, + serialized_end=1262, +) + + +_GETENTRYGROUPREQUEST = _descriptor.Descriptor( + name="GetEntryGroupRequest", + full_name="google.cloud.datacatalog.v1beta1.GetEntryGroupRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.datacatalog.v1beta1.GetEntryGroupRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A'\n%datacatalog.googleapis.com/EntryGroup" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="read_mask", + full_name="google.cloud.datacatalog.v1beta1.GetEntryGroupRequest.read_mask", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1265, + serialized_end=1395, +) + + +_DELETEENTRYGROUPREQUEST = _descriptor.Descriptor( + name="DeleteEntryGroupRequest", + full_name="google.cloud.datacatalog.v1beta1.DeleteEntryGroupRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.datacatalog.v1beta1.DeleteEntryGroupRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A'\n%datacatalog.googleapis.com/EntryGroup" + ), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1397, + serialized_end=1483, +) + + +_CREATEENTRYREQUEST = _descriptor.Descriptor( + name="CreateEntryRequest", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A'\n%datacatalog.googleapis.com/EntryGroup" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry_id", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryRequest.entry_id", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry", + full_name="google.cloud.datacatalog.v1beta1.CreateEntryRequest.entry", + index=2, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + 
extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1486, + serialized_end=1653, ) @@ -361,7 +627,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -391,8 +657,47 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=939, - serialized_end=1064, + serialized_start=1656, + serialized_end=1786, +) + + +_DELETEENTRYREQUEST = _descriptor.Descriptor( + name="DeleteEntryRequest", + full_name="google.cloud.datacatalog.v1beta1.DeleteEntryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.datacatalog.v1beta1.DeleteEntryRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b('\340A\002\372A"\n datacatalog.googleapis.com/Entry'), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1788, + serialized_end=1864, ) @@ -418,7 +723,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\002\372A"\n datacatalog.googleapis.com/Entry'), file=DESCRIPTOR, ) ], @@ -430,8 +735,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1066, - serialized_end=1097, + serialized_start=1866, + serialized_end=1939, ) @@ -495,8 +800,8 @@ fields=[], ) ], - serialized_start=1099, - serialized_end=1185, + serialized_start=1941, + serialized_end=2027, ) @@ -522,7 +827,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A'\n%datacatalog.googleapis.com/EntryGroup"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -540,7 +845,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -561,10 +866,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="gcs_fileset_spec", + full_name="google.cloud.datacatalog.v1beta1.Entry.gcs_fileset_spec", + index=3, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="bigquery_table_spec", full_name="google.cloud.datacatalog.v1beta1.Entry.bigquery_table_spec", - index=3, + index=4, number=12, type=11, cpp_type=10, @@ -582,7 +905,7 @@ _descriptor.FieldDescriptor( name="bigquery_date_sharded_spec", full_name="google.cloud.datacatalog.v1beta1.Entry.bigquery_date_sharded_spec", - index=4, + index=5, number=15, type=11, cpp_type=10, @@ -598,10 +921,120 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="display_name", - full_name="google.cloud.datacatalog.v1beta1.Entry.display_name", - index=5, - number=3, + name="display_name", + full_name="google.cloud.datacatalog.v1beta1.Entry.display_name", + index=6, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.cloud.datacatalog.v1beta1.Entry.description", + index=7, + number=4, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="schema", + full_name="google.cloud.datacatalog.v1beta1.Entry.schema", + index=8, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_system_timestamps", + full_name="google.cloud.datacatalog.v1beta1.Entry.source_system_timestamps", + index=9, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b( + "\352Au\n datacatalog.googleapis.com/Entry\022Qprojects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" + ), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="entry_type", + full_name="google.cloud.datacatalog.v1beta1.Entry.entry_type", + index=0, + containing_type=None, + fields=[], + ), + _descriptor.OneofDescriptor( + name="type_spec", + full_name="google.cloud.datacatalog.v1beta1.Entry.type_spec", + index=1, + containing_type=None, + fields=[], + ), + ], + serialized_start=2030, + serialized_end=2786, +) + + +_ENTRYGROUP = _descriptor.Descriptor( + name="EntryGroup", + full_name="google.cloud.datacatalog.v1beta1.EntryGroup", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + 
full_name="google.cloud.datacatalog.v1beta1.EntryGroup.name", + index=0, + number=1, type=9, cpp_type=9, label=1, @@ -616,10 +1049,10 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.datacatalog.v1beta1.Entry.description", - index=6, - number=4, + name="display_name", + full_name="google.cloud.datacatalog.v1beta1.EntryGroup.display_name", + index=1, + number=2, type=9, cpp_type=9, label=1, @@ -634,15 +1067,15 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="schema", - full_name="google.cloud.datacatalog.v1beta1.Entry.schema", - index=7, - number=5, - type=11, - cpp_type=10, + name="description", + full_name="google.cloud.datacatalog.v1beta1.EntryGroup.description", + index=2, + number=3, + type=9, + cpp_type=9, label=1, has_default_value=False, - default_value=None, + default_value=_b("").decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -652,10 +1085,10 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="source_system_timestamps", - full_name="google.cloud.datacatalog.v1beta1.Entry.source_system_timestamps", - index=8, - number=7, + name="data_catalog_timestamps", + full_name="google.cloud.datacatalog.v1beta1.EntryGroup.data_catalog_timestamps", + index=3, + number=4, type=11, cpp_type=10, label=1, @@ -666,28 +1099,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Aj\n%datacatalog.googleapis.com/EntryGroup\022Aprojects/{project}/locations/{location}/entryGroups/{entry_group}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type_spec", - full_name="google.cloud.datacatalog.v1beta1.Entry.type_spec", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1188, - 
serialized_end=1674, + oneofs=[], + serialized_start=2789, + serialized_end=3059, ) @@ -713,7 +1140,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A(\022&datacatalog.googleapis.com/TagTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -731,7 +1160,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -749,7 +1178,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -761,8 +1190,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1677, - serialized_end=1813, + serialized_start=3062, + serialized_end=3256, ) @@ -788,7 +1217,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A(\n&datacatalog.googleapis.com/TagTemplate" + ), file=DESCRIPTOR, ) ], @@ -800,8 +1231,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1815, - serialized_end=1852, + serialized_start=3258, + serialized_end=3343, ) @@ -827,7 +1258,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -857,8 +1288,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1855, - serialized_end=1999, + serialized_start=3346, + serialized_end=3495, ) @@ -884,7 +1315,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A(\n&datacatalog.googleapis.com/TagTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -902,7 +1335,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -914,8 +1347,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2001, - serialized_end=2056, + serialized_start=3497, + serialized_end=3605, ) @@ -941,7 +1374,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \n\036datacatalog.googleapis.com/Tag" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -959,7 +1394,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -971,8 +1406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2058, - serialized_end=2144, + serialized_start=3608, + serialized_end=3739, ) @@ -998,7 +1433,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1028,8 +1463,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2146, - serialized_end=2265, + serialized_start=3741, + serialized_end=3865, ) @@ -1055,7 +1490,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \022\036datacatalog.googleapis.com/Tag" + ), file=DESCRIPTOR, ) ], @@ -1067,8 +1504,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2267, - serialized_end=2299, + serialized_start=3867, + serialized_end=3939, ) @@ -1094,7 +1531,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A(\n&datacatalog.googleapis.com/TagTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1112,7 +1551,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1130,7 +1569,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1142,8 +1581,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2302, - serialized_end=2460, + serialized_start=3942, + serialized_end=4158, ) @@ -1169,7 +1608,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+datacatalog.googleapis.com/TagTemplateField" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1187,7 +1628,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1217,8 +1658,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2463, - serialized_end=2637, + serialized_start=4161, + serialized_end=4393, ) @@ -1244,7 +1685,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+datacatalog.googleapis.com/TagTemplateField" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1262,7 +1705,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1274,8 +1717,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2639, - serialized_end=2719, + serialized_start=4396, + serialized_end=4534, ) @@ -1301,7 +1744,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+datacatalog.googleapis.com/TagTemplateField" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1319,7 +1764,7 @@ containing_type=None, is_extension=False, extension_scope=None, 
- serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1331,8 +1776,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2721, - serialized_end=2781, + serialized_start=4536, + serialized_end=4654, ) @@ -1358,7 +1803,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \022\036datacatalog.googleapis.com/Tag" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1406,8 +1853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2783, - serialized_end=2855, + serialized_start=4656, + serialized_end=4768, ) @@ -1463,8 +1910,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2857, - serialized_end=2953, + serialized_start=4770, + serialized_end=4866, ) _SEARCHCATALOGREQUEST_SCOPE.containing_type = _SEARCHCATALOGREQUEST @@ -1474,6 +1921,11 @@ ].message_type = ( google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_search__pb2._SEARCHCATALOGRESULT ) +_CREATEENTRYGROUPREQUEST.fields_by_name["entry_group"].message_type = _ENTRYGROUP +_GETENTRYGROUPREQUEST.fields_by_name[ + "read_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_CREATEENTRYREQUEST.fields_by_name["entry"].message_type = _ENTRY _UPDATEENTRYREQUEST.fields_by_name["entry"].message_type = _ENTRY _UPDATEENTRYREQUEST.fields_by_name[ "update_mask" @@ -1491,6 +1943,11 @@ "sql_resource" ].containing_oneof = _LOOKUPENTRYREQUEST.oneofs_by_name["target_name"] _ENTRY.fields_by_name["type"].enum_type = _ENTRYTYPE +_ENTRY.fields_by_name[ + "gcs_fileset_spec" +].message_type = ( + google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_gcs__fileset__spec__pb2._GCSFILESETSPEC +) _ENTRY.fields_by_name[ "bigquery_table_spec" ].message_type = ( @@ -1511,6 +1968,14 @@ ].message_type = ( google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2._SYSTEMTIMESTAMPS ) 
+_ENTRY.oneofs_by_name["entry_type"].fields.append(_ENTRY.fields_by_name["type"]) +_ENTRY.fields_by_name["type"].containing_oneof = _ENTRY.oneofs_by_name["entry_type"] +_ENTRY.oneofs_by_name["type_spec"].fields.append( + _ENTRY.fields_by_name["gcs_fileset_spec"] +) +_ENTRY.fields_by_name["gcs_fileset_spec"].containing_oneof = _ENTRY.oneofs_by_name[ + "type_spec" +] _ENTRY.oneofs_by_name["type_spec"].fields.append( _ENTRY.fields_by_name["bigquery_table_spec"] ) @@ -1523,6 +1988,11 @@ _ENTRY.fields_by_name[ "bigquery_date_sharded_spec" ].containing_oneof = _ENTRY.oneofs_by_name["type_spec"] +_ENTRYGROUP.fields_by_name[ + "data_catalog_timestamps" +].message_type = ( + google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2._SYSTEMTIMESTAMPS +) _CREATETAGTEMPLATEREQUEST.fields_by_name[ "tag_template" ].message_type = ( @@ -1563,10 +2033,16 @@ ].message_type = google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAG DESCRIPTOR.message_types_by_name["SearchCatalogRequest"] = _SEARCHCATALOGREQUEST DESCRIPTOR.message_types_by_name["SearchCatalogResponse"] = _SEARCHCATALOGRESPONSE +DESCRIPTOR.message_types_by_name["CreateEntryGroupRequest"] = _CREATEENTRYGROUPREQUEST +DESCRIPTOR.message_types_by_name["GetEntryGroupRequest"] = _GETENTRYGROUPREQUEST +DESCRIPTOR.message_types_by_name["DeleteEntryGroupRequest"] = _DELETEENTRYGROUPREQUEST +DESCRIPTOR.message_types_by_name["CreateEntryRequest"] = _CREATEENTRYREQUEST DESCRIPTOR.message_types_by_name["UpdateEntryRequest"] = _UPDATEENTRYREQUEST +DESCRIPTOR.message_types_by_name["DeleteEntryRequest"] = _DELETEENTRYREQUEST DESCRIPTOR.message_types_by_name["GetEntryRequest"] = _GETENTRYREQUEST DESCRIPTOR.message_types_by_name["LookupEntryRequest"] = _LOOKUPENTRYREQUEST DESCRIPTOR.message_types_by_name["Entry"] = _ENTRY +DESCRIPTOR.message_types_by_name["EntryGroup"] = _ENTRYGROUP DESCRIPTOR.message_types_by_name["CreateTagTemplateRequest"] = _CREATETAGTEMPLATEREQUEST 
DESCRIPTOR.message_types_by_name["GetTagTemplateRequest"] = _GETTAGTEMPLATEREQUEST DESCRIPTOR.message_types_by_name["UpdateTagTemplateRequest"] = _UPDATETAGTEMPLATEREQUEST @@ -1606,21 +2082,21 @@ include_org_ids: Data Catalog tries to automatically choose the right corpus of data to search through. You can ensure an organization is - included by adding it to "include\_org\_ids". You can ensure a - project's org is included with "include\_project\_ids". You + included by adding it to ``include_org_ids``. You can ensure a + project's org is included with ``include_project_ids``. You must specify at least one organization using - "include\_org\_ids" or "include\_project\_ids" in all search + ``include_org_ids`` or ``include_project_ids`` in all search requests. List of organization IDs to search within. To find your organization ID, follow instructions in https://cloud.google.com/resource-manager/docs/creating- - managing-organization + managing-organization. include_project_ids: List of project IDs to search within. To learn more about the distinction between project names/IDs/numbers, go to - https://cloud.google.com/docs/overview/#projects + https://cloud.google.com/docs/overview/#projects. include_gcp_public_datasets: - If true, include Google Cloud Platform (GCP) public datasets - in the search results. Info on GCP public datasets is + If ``true``, include Google Cloud Platform (GCP) public + datasets in the search results. Info on GCP public datasets is available at https://cloud.google.com/public-datasets/. By default, GCP public datasets are excluded. """, @@ -1649,22 +2125,20 @@ 10. Max limit for page\_size is 1000. Throws an invalid argument for page\_size > 1000. page_token: - Optional pagination token returned in an earlier [SearchCatalo - gResponse.next\_page\_token][google.cloud.datacatalog.v1beta1. 
- DataCatalog.SearchCatalogResponse.next\_page\_token]; - indicates that this is a continuation of a prior [SearchCatalo - g][google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] + Optional. Pagination token returned in an earlier [SearchCatal + ogResponse.next\_page\_token][google.cloud.datacatalog.v1beta1 + .SearchCatalogResponse.next\_page\_token], which indicates + that this is a continuation of a prior [SearchCatalogRequest][ + google.cloud.datacatalog.v1beta1.DataCatalog.SearchCatalog] call, and that the system should return the next page of data. - If empty then the first page is returned. + If empty, the first page is returned. order_by: Specifies the ordering of results, currently supported case- - sensitive choices are: .. raw:: html
    .. raw:: html -
  • relevance .. raw:: html
  • .. raw:: html -
  • last\_access\_timestamp [asc\|desc], defaults to - descending if not specified, .. raw:: html
  • .. - raw:: html
  • last\_modified\_timestamp [asc\|desc], - defaults to descending if not specified. .. raw:: html -
  • .. raw:: html
+ sensitive choices are: - ``relevance``, only supports + desecending - ``last_access_timestamp [asc|desc]``, defaults + to descending if not specified - ``last_modified_timestamp + [asc|desc]``, defaults to descending if not specified If + not specified, defaults to ``relevance`` descending. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.SearchCatalogRequest) ), @@ -1684,7 +2158,7 @@ Attributes: results: - Search results in descending order of relevance. + Search results. next_page_token: The token that can be used to retrieve the next page of results. @@ -1694,6 +2168,104 @@ ) _sym_db.RegisterMessage(SearchCatalogResponse) +CreateEntryGroupRequest = _reflection.GeneratedProtocolMessageType( + "CreateEntryGroupRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEENTRYGROUPREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [CreateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup]. + + + Attributes: + parent: + Required. The name of the project this entry group is in. + Example: - projects/{project\_id}/locations/{location} Note + that this EntryGroup and its child resources may not actually + be stored in the location in this name. + entry_group_id: + Required. The id of the entry group to create. + entry_group: + The entry group to create. Defaults to an empty entry group. + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.CreateEntryGroupRequest) + ), +) +_sym_db.RegisterMessage(CreateEntryGroupRequest) + +GetEntryGroupRequest = _reflection.GeneratedProtocolMessageType( + "GetEntryGroupRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETENTRYGROUPREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [GetEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup]. + + + Attributes: + name: + Required. The name of the entry group. 
For example, ``projects + /{project_id}/locations/{location}/entryGroups/{entry_group_id + }``. + read_mask: + The fields to return. If not set or empty, all fields are + returned. + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GetEntryGroupRequest) + ), +) +_sym_db.RegisterMessage(GetEntryGroupRequest) + +DeleteEntryGroupRequest = _reflection.GeneratedProtocolMessageType( + "DeleteEntryGroupRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEENTRYGROUPREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [DeleteEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup]. + + + Attributes: + name: + Required. The name of the entry group. For example, ``projects + /{project_id}/locations/{location}/entryGroups/{entry_group_id + }``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteEntryGroupRequest) + ), +) +_sym_db.RegisterMessage(DeleteEntryGroupRequest) + +CreateEntryRequest = _reflection.GeneratedProtocolMessageType( + "CreateEntryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEENTRYREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry]. + + + Attributes: + parent: + Required. The name of the entry group this entry is in. + Example: - projects/{project\_id}/locations/{location}/entry + Groups/{entry\_group\_id} Note that this Entry and its child + resources may not actually be stored in the location in this + name. + entry_id: + Required. The id of the entry to create. + entry: + Required. The entry to create. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.CreateEntryRequest) + ), +) +_sym_db.RegisterMessage(CreateEntryRequest) + UpdateEntryRequest = _reflection.GeneratedProtocolMessageType( "UpdateEntryRequest", (_message.Message,), @@ -1706,19 +2278,41 @@ Attributes: entry: - Required. The updated Entry. + Required. The updated entry. The "name" field must be set. update_mask: - Optional. The fields to update on the entry. If absent or - empty, all modifiable fields are updated. Modifiable fields - in synced entries: 1. schema (Pub/Sub topics only) - Modifiable fields in native entries: 1. display\_name 2. - description 3. schema + The fields to update on the entry. If absent or empty, all + modifiable fields are updated. The following fields are + modifiable: \* For entries with type ``DATA_STREAM``: \* + ``schema`` \* For entries with type ``FILESET`` \* ``schema`` + \* ``display_name`` \* ``description`` \* ``gcs_fileset_spec`` + \* ``gcs_fileset_spec.file_patterns`` """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateEntryRequest) ), ) _sym_db.RegisterMessage(UpdateEntryRequest) +DeleteEntryRequest = _reflection.GeneratedProtocolMessageType( + "DeleteEntryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEENTRYREQUEST, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""Request message for + [DeleteEntry][google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry]. + + + Attributes: + name: + Required. The name of the entry. Example: - projects/{projec + t\_id}/locations/{location}/entryGroups/{entry\_group\_id}/ent + ries/{entry\_id} + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteEntryRequest) + ), +) +_sym_db.RegisterMessage(DeleteEntryRequest) + GetEntryRequest = _reflection.GeneratedProtocolMessageType( "GetEntryRequest", (_message.Message,), @@ -1731,9 +2325,13 @@ Attributes: name: - Required. The name of the entry. 
For example, "projects/{proje - ct\_id}/locations/{location}/entryGroups/{entry\_group\_id}/en - tries/{entry\_id}". + Required. The name of the entry. Example: - projects/{projec + t\_id}/locations/{location}/entryGroups/{entry\_group\_id}/ent + ries/{entry\_id} Entry groups are logical groupings of + entries. Currently, users cannot create/modify entry groups. + They are created by Data Catalog; they include ``@bigquery`` + for all BigQuery entries, and ``@pubsub`` for all Cloud + Pub/Sub entries. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GetEntryRequest) ), @@ -1757,23 +2355,20 @@ linked_resource: The full name of the Google Cloud Platform resource the Data Catalog entry represents. See: https://cloud.google.com/apis/d - esign/resource\_names#full\_resource\_name Full names are - case-sensitive. Examples: "//bigquery.googleapis.com/projects - /projectId/datasets/datasetId/tables/tableId". - "//pubsub.googleapis.com/projects/projectId/topics/topicId" + esign/resource\_names#full\_resource\_name. Full names are + case-sensitive. Examples: - //bigquery.googleapis.com/proje + cts/projectId/datasets/datasetId/tables/tableId - + //pubsub.googleapis.com/projects/projectId/topics/topicId sql_resource: The SQL name of the entry. SQL names are case-sensitive. - Examples: .. raw:: html
    .. raw:: html
  • - cloud\_pubsub.project\_id.topic\_id .. raw:: html
  • - .. raw:: html
  • pubsub.project\_id.\ - ``topic.id.with.dots`` .. raw:: html
  • .. raw:: html -
  • bigquery.project\_id.dataset\_id.table\_id .. raw:: - html
  • .. raw:: html
  • datacatalog.project\_i - d.location\_id.entry\_group\_id.entry\_id .. raw:: html -
  • .. raw:: html
\*\_ids shoud satisfy the - standard SQL rules for identifiers. + Examples: - ``cloud_pubsub.project_id.topic_id`` - + ``pubsub.project_id.`topic.id.with.dots``` - + ``bigquery.project_id.dataset_id.table_id`` - + ``datacatalog.project_id.location_id.entry_group_id.entry_id`` + ``*_id``\ s shoud satisfy the standard SQL rules for + identifiers. https://cloud.google.com/bigquery/docs/reference/standard- - sql/lexical + sql/lexical. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.LookupEntryRequest) ), @@ -1787,9 +2382,9 @@ DESCRIPTOR=_ENTRY, __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", __doc__="""Entry Metadata. A Data Catalog Entry resource represents another - resource in Google Cloud Platform, such as a BigQuery Dataset or a - Pub/Sub Topic. Clients can use the ``linked_resource`` field in the - Entry resource to refer to the original resource id of the source + resource in Google Cloud Platform, such as a BigQuery dataset or a Cloud + Pub/Sub topic. Clients can use the ``linked_resource`` field in the + Entry resource to refer to the original resource ID of the source system. An Entry resource contains resource details, such as its schema. An @@ -1799,54 +2394,90 @@ Attributes: name: - Required when used in [UpdateEntryRequest][google.cloud.dataca - talog.v1beta1.UpdateEntryRequest]. The Data Catalog resource - name of the entry in URL format. For example, "projects/{proje - ct\_id}/locations/{location}/entryGroups/{entry\_group\_id}/en - tries/{entry\_id}". Note that this Entry and its child - resources may not actually be stored in the location in this - name. + The Data Catalog resource name of the entry in URL format. + Example: - projects/{project\_id}/locations/{location}/entry + Groups/{entry\_group\_id}/entries/{entry\_id} Note that this + Entry and its child resources may not actually be stored in + the location in this name. linked_resource: - Output only. 
The full name of the cloud resource the entry - belongs to. See: https://cloud.google.com/apis/design/resource - \_names#full\_resource\_name Data Catalog supports resources - from select Google Cloud Platform systems. ``linked_resource`` - is the full name of the Google Cloud Platform resource. For - example, the ``linked_resource`` for a table resource from - BigQuery is: "//bigquery.googleapis.com/projects/projectId/da - tasets/datasetId/tables/tableId". + Output only. The resource this metadata entry refers to. For + Google Cloud Platform resources, ``linked_resource`` is the + `full name of the resource `__. For example, the + ``linked_resource`` for a table resource from BigQuery is: - + //bigquery.googleapis.com/projects/projectId/datasets/datasetI + d/tables/tableId + entry_type: + Required. Entry type. type: - Required. Type of entry. + The type of the entry. type_spec: - Optional. Type specification information. + Type specification information. + gcs_fileset_spec: + Specification that applies to a Cloud Storage fileset. This is + only valid on entries of type FILESET. bigquery_table_spec: Specification that applies to a BigQuery table. This is only - valid on entries of type TABLE. + valid on entries of type ``TABLE``. bigquery_date_sharded_spec: Specification for a group of BigQuery tables with name pattern - [prefix]YYYYMMDD. Context: + ``[prefix]YYYYMMDD``. Context: https://cloud.google.com/bigquery/docs/partitioned- - tables#partitioning\_versus\_sharding + tables#partitioning\_versus\_sharding. display_name: - Optional. Display information such as title and description. A - short name to identify the entry, for example, "Analytics Data - - Jan 2011". Default value is an empty string. + Display information such as title and description. A short + name to identify the entry, for example, "Analytics Data - Jan + 2011". Default value is an empty string. description: - Optional. 
Entry description, which can consist of several - sentences or paragraphs that describe entry contents. Default - value is an empty string. + Entry description, which can consist of several sentences or + paragraphs that describe entry contents. Default value is an + empty string. schema: - Optional. Schema of the entry. An entry might not have any - schema attached to it. + Schema of the entry. An entry might not have any schema + attached to it. source_system_timestamps: Output only. Timestamps about the underlying Google Cloud - Platform resource -- not about this Data Catalog Entry. + Platform resource, not about this Data Catalog Entry. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.Entry) ), ) _sym_db.RegisterMessage(Entry) +EntryGroup = _reflection.GeneratedProtocolMessageType( + "EntryGroup", + (_message.Message,), + dict( + DESCRIPTOR=_ENTRYGROUP, + __module__="google.cloud.datacatalog_v1beta1.proto.datacatalog_pb2", + __doc__="""EntryGroup Metadata. An EntryGroup resource represents a logical + grouping of zero or more Data Catalog + [Entry][google.cloud.datacatalog.v1beta1.Entry] resources. + + + Attributes: + name: + The resource name of the entry group in URL format. Example: + - projects/{project\_id}/locations/{location}/entryGroups/{en + try\_group\_id} Note that this EntryGroup and its child + resources may not actually be stored in the location in this + name. + display_name: + A short name to identify the entry group, for example, + "analytics data - jan 2011". Default value is an empty string. + description: + Entry group description, which can consist of several + sentences or paragraphs that describe entry group contents. + Default value is an empty string. + data_catalog_timestamps: + Output only. Timestamps about this EntryGroup. Default value + is empty timestamps. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.EntryGroup) + ), +) +_sym_db.RegisterMessage(EntryGroup) + CreateTagTemplateRequest = _reflection.GeneratedProtocolMessageType( "CreateTagTemplateRequest", (_message.Message,), @@ -1860,10 +2491,10 @@ Attributes: parent: Required. The name of the project and the location this - template is in. Example: - "projects/{project\_id}/locations/{location}". Note that this - TagTemplate and its child resources may not actually be stored - in the location in this name. + template is in. Example: - + projects/{project\_id}/locations/{location} TagTemplate and + its child resources may not actually be stored in the location + in this name. tag_template_id: Required. The id of the tag template to create. tag_template: @@ -1886,9 +2517,9 @@ Attributes: name: - Required. The name of the tag template. For example, "projects - /{project\_id}/locations/{location}/tagTemplates/{tag\_templat - e\_id}". + Required. The name of the tag template. Example: - projects/ + {project\_id}/locations/{location}/tagTemplates/{tag\_template + \_id} """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GetTagTemplateRequest) ), @@ -1907,12 +2538,12 @@ Attributes: tag_template: - Required. The template to update. + Required. The template to update. The "name" field must be + set. update_mask: - Optional. The field mask specifies the parts of the template - to overwrite. Allowed fields: - display\_name If - update\_mask is omitted, all of the allowed fields above will - be updated. + The field mask specifies the parts of the template to + overwrite. Allowed fields: - ``display_name`` If absent or + empty, all of the allowed fields above will be updated. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest) ), @@ -1931,13 +2562,14 @@ Attributes: name: - Required. The name of the tag template to delete. 
For example, - "projects/{project\_id}/locations/{location}/tagTemplates/{tag - \_template\_id}". + Required. The name of the tag template to delete. Example: - + projects/{project\_id}/locations/{location}/tagTemplates/{tag\ + _template\_id} force: - Required. Currently, this field must always be set to true. - This confirms the deletion of any possible tags using this - template. force = false will be supported in the future. + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of any possible tags + using this template. ``force = false`` will be supported in + the future. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteTagTemplateRequest) ), @@ -1957,9 +2589,9 @@ Attributes: parent: Required. The name of the resource to attach this tag to. Tags - can be attached to Entries. (example: "projects/{project\_id}/ - locations/{location}/entryGroups/{entry\_group\_id}/entries/{e - ntry\_id}"). Note that this Tag and its child resources may + can be attached to Entries. Example: - projects/{project\_id + }/locations/{location}/entryGroups/{entry\_group\_id}/entries/ + {entry\_id} Note that this Tag and its child resources may not actually be stored in the location in this name. tag: Required. The tag to create. @@ -1981,11 +2613,11 @@ Attributes: tag: - Required. The updated tag. + Required. The updated tag. The "name" field must be set. update_mask: - Optional. The fields to update on the Tag. If absent or empty, - all modifiable fields are updated. Currently the only - modifiable field is the field ``fields``. + The fields to update on the Tag. If absent or empty, all + modifiable fields are updated. Currently the only modifiable + field is the field ``fields``. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateTagRequest) ), @@ -2004,9 +2636,9 @@ Attributes: name: - Required. The name of the tag to delete. 
For example, "project - s/{project\_id}/locations/{location}/entryGroups/{entry\_group - \_id}/entries/{entry\_id}/tags/{tag\_id}". + Required. The name of the tag to delete. Example: - projects + /{project\_id}/locations/{location}/entryGroups/{entry\_group\ + _id}/entries/{entry\_id}/tags/{tag\_id} """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteTagRequest) ), @@ -2026,15 +2658,15 @@ Attributes: parent: Required. The name of the project this template is in. - Example: "projects/{project\_id}/locations/{location}/tagTempl - ates/{tag\_template\_id}". Note that this TagTemplateField may - not actually be stored in the location in this name. + Example: - projects/{project\_id}/locations/{location}/tagTe + mplates/{tag\_template\_id} Note that this TagTemplateField + may not actually be stored in the location in this name. tag_template_field_id: - Required. The id of the tag template field to create. Field + Required. The ID of the tag template field to create. Field ids can contain letters (both uppercase and lowercase), - numbers (0-9), underscores (\_) and dashes (-). Field ids must + numbers (0-9), underscores (\_) and dashes (-). Field IDs must be at least 1 character long and at most 128 characters long. - Field ids must also be unique to their template. + Field IDs must also be unique within their template. tag_template_field: Required. The tag template field to create. """, @@ -2055,19 +2687,19 @@ Attributes: name: - Required. The name of the tag template field. For example, "pr - ojects/{project\_id}/locations/{location}/tagTemplates/{tag\_t - emplate\_id}/fields/{tag\_template\_field\_id}". + Required. The name of the tag template field. Example: - pro + jects/{project\_id}/locations/{location}/tagTemplates/{tag\_te + mplate\_id}/fields/{tag\_template\_field\_id} tag_template_field: Required. The template to update. update_mask: - Optional. The field mask specifies the parts of the template - to overwrite. 
Allowed fields: - display\_name - - type.enum\_type If update\_mask is omitted, all of the - allowed fields above will be updated. When updating an enum - type, the provided values will be merged with the existing - values. Therefore, enum values can only be added, existing - enum values cannot be deleted nor renamed. + The field mask specifies the parts of the template to be + updated. Allowed fields: - ``display_name`` - + ``type.enum_type`` If ``update_mask`` is not set or empty, + all of the allowed fields above will be updated. When + updating an enum type, the provided values will be merged with + the existing values. Therefore, enum values can only be added, + existing enum values cannot be deleted nor renamed. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateTagTemplateFieldRequest) ), @@ -2086,12 +2718,12 @@ Attributes: name: - Required. The name of the tag template. For example, "projects - /{project\_id}/locations/{location}/tagTemplates/{tag\_templat - e\_id}/fields/{tag\_template\_field\_id}". + Required. The name of the tag template. Example: - projects/ + {project\_id}/locations/{location}/tagTemplates/{tag\_template + \_id}/fields/{tag\_template\_field\_id} new_tag_template_field_id: Required. The new ID of this tag template field. For example, - "my\_new\_field". + ``my_new_field``. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.RenameTagTemplateFieldRequest) ), @@ -2110,13 +2742,14 @@ Attributes: name: - Required. The name of the tag template field to delete. For - example, "projects/{project\_id}/locations/{location}/tagTempl - ates/{tag\_template\_id}/fields/{tag\_template\_field\_id}". + Required. The name of the tag template field to delete. + Example: - projects/{project\_id}/locations/{location}/tagTe + mplates/{tag\_template\_id}/fields/{tag\_template\_field\_id} force: - Required. Currently, this field must always be set to true. 
- This confirms the deletion of this field from any tags using - this field. force = false will be supported in the future. + Required. Currently, this field must always be set to + ``true``. This confirms the deletion of this field from any + tags using this field. ``force = false`` will be supported in + the future. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.DeleteTagTemplateFieldRequest) ), @@ -2139,11 +2772,11 @@ tags of. The resource could be an [Entry][google.cloud.datacatalog.v1beta1.Entry]. page_size: - Optional. The maximum number of tags to return. Default is 10. - Max limit is 1000. + The maximum number of tags to return. Default is 10. Max limit + is 1000. page_token: - Optional. Token that specifies which page is requested. If - empty, the first page is returned. + Token that specifies which page is requested. If empty, the + first page is returned. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.ListTagsRequest) ), @@ -2174,6 +2807,48 @@ DESCRIPTOR._options = None +_SEARCHCATALOGREQUEST.fields_by_name["scope"]._options = None +_SEARCHCATALOGREQUEST.fields_by_name["query"]._options = None +_SEARCHCATALOGREQUEST.fields_by_name["page_token"]._options = None +_CREATEENTRYGROUPREQUEST.fields_by_name["parent"]._options = None +_CREATEENTRYGROUPREQUEST.fields_by_name["entry_group_id"]._options = None +_GETENTRYGROUPREQUEST.fields_by_name["name"]._options = None +_DELETEENTRYGROUPREQUEST.fields_by_name["name"]._options = None +_CREATEENTRYREQUEST.fields_by_name["parent"]._options = None +_CREATEENTRYREQUEST.fields_by_name["entry_id"]._options = None +_CREATEENTRYREQUEST.fields_by_name["entry"]._options = None +_UPDATEENTRYREQUEST.fields_by_name["entry"]._options = None +_DELETEENTRYREQUEST.fields_by_name["name"]._options = None +_GETENTRYREQUEST.fields_by_name["name"]._options = None +_ENTRY.fields_by_name["name"]._options = None +_ENTRY.fields_by_name["linked_resource"]._options = None 
+_ENTRY.fields_by_name["source_system_timestamps"]._options = None +_ENTRY._options = None +_ENTRYGROUP.fields_by_name["data_catalog_timestamps"]._options = None +_ENTRYGROUP._options = None +_CREATETAGTEMPLATEREQUEST.fields_by_name["parent"]._options = None +_CREATETAGTEMPLATEREQUEST.fields_by_name["tag_template_id"]._options = None +_CREATETAGTEMPLATEREQUEST.fields_by_name["tag_template"]._options = None +_GETTAGTEMPLATEREQUEST.fields_by_name["name"]._options = None +_UPDATETAGTEMPLATEREQUEST.fields_by_name["tag_template"]._options = None +_DELETETAGTEMPLATEREQUEST.fields_by_name["name"]._options = None +_DELETETAGTEMPLATEREQUEST.fields_by_name["force"]._options = None +_CREATETAGREQUEST.fields_by_name["parent"]._options = None +_CREATETAGREQUEST.fields_by_name["tag"]._options = None +_UPDATETAGREQUEST.fields_by_name["tag"]._options = None +_DELETETAGREQUEST.fields_by_name["name"]._options = None +_CREATETAGTEMPLATEFIELDREQUEST.fields_by_name["parent"]._options = None +_CREATETAGTEMPLATEFIELDREQUEST.fields_by_name["tag_template_field_id"]._options = None +_CREATETAGTEMPLATEFIELDREQUEST.fields_by_name["tag_template_field"]._options = None +_UPDATETAGTEMPLATEFIELDREQUEST.fields_by_name["name"]._options = None +_UPDATETAGTEMPLATEFIELDREQUEST.fields_by_name["tag_template_field"]._options = None +_RENAMETAGTEMPLATEFIELDREQUEST.fields_by_name["name"]._options = None +_RENAMETAGTEMPLATEFIELDREQUEST.fields_by_name[ + "new_tag_template_field_id" +]._options = None +_DELETETAGTEMPLATEFIELDREQUEST.fields_by_name["name"]._options = None +_DELETETAGTEMPLATEFIELDREQUEST.fields_by_name["force"]._options = None +_LISTTAGSREQUEST.fields_by_name["parent"]._options = None _DATACATALOG = _descriptor.ServiceDescriptor( name="DataCatalog", @@ -2183,8 +2858,8 @@ serialized_options=_b( "\312A\032datacatalog.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=3025, - serialized_end=6729, + serialized_start=4951, + serialized_end=10527, methods=[ 
_descriptor.MethodDescriptor( name="SearchCatalog", @@ -2194,35 +2869,90 @@ input_type=_SEARCHCATALOGREQUEST, output_type=_SEARCHCATALOGRESPONSE, serialized_options=_b( - '\202\323\344\223\002\034"\027/v1beta1/catalog:search:\001*' + '\202\323\344\223\002\034"\027/v1beta1/catalog:search:\001*\332A\013scope,query' + ), + ), + _descriptor.MethodDescriptor( + name="CreateEntryGroup", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntryGroup", + index=1, + containing_service=None, + input_type=_CREATEENTRYGROUPREQUEST, + output_type=_ENTRYGROUP, + serialized_options=_b( + '\202\323\344\223\002C"4/v1beta1/{parent=projects/*/locations/*}/entryGroups:\013entry_group\332A!parent,entry_group_id,entry_group' + ), + ), + _descriptor.MethodDescriptor( + name="GetEntryGroup", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.GetEntryGroup", + index=2, + containing_service=None, + input_type=_GETENTRYGROUPREQUEST, + output_type=_ENTRYGROUP, + serialized_options=_b( + "\202\323\344\223\0026\0224/v1beta1/{name=projects/*/locations/*/entryGroups/*}\332A\004name\332A\016name,read_mask" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteEntryGroup", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntryGroup", + index=3, + containing_service=None, + input_type=_DELETEENTRYGROUPREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + serialized_options=_b( + "\202\323\344\223\0026*4/v1beta1/{name=projects/*/locations/*/entryGroups/*}\332A\004name" + ), + ), + _descriptor.MethodDescriptor( + name="CreateEntry", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry", + index=4, + containing_service=None, + input_type=_CREATEENTRYREQUEST, + output_type=_ENTRY, + serialized_options=_b( + '\202\323\344\223\002G">/v1beta1/{parent=projects/*/locations/*/entryGroups/*}/entries:\005entry\332A\025parent,entry_id,entry' ), ), _descriptor.MethodDescriptor( name="UpdateEntry", 
full_name="google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntry", - index=1, + index=5, containing_service=None, input_type=_UPDATEENTRYREQUEST, output_type=_ENTRY, serialized_options=_b( - "\202\323\344\223\002M2D/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}:\005entry" + "\202\323\344\223\002M2D/v1beta1/{entry.name=projects/*/locations/*/entryGroups/*/entries/*}:\005entry\332A\005entry\332A\021entry,update_mask" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteEntry", + full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteEntry", + index=6, + containing_service=None, + input_type=_DELETEENTRYREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + serialized_options=_b( + "\202\323\344\223\002@*>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\332A\004name" ), ), _descriptor.MethodDescriptor( name="GetEntry", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.GetEntry", - index=2, + index=7, containing_service=None, input_type=_GETENTRYREQUEST, output_type=_ENTRY, serialized_options=_b( - "\202\323\344\223\002@\022>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}" + "\202\323\344\223\002@\022>/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*}\332A\004name" ), ), _descriptor.MethodDescriptor( name="LookupEntry", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.LookupEntry", - index=3, + index=8, containing_service=None, input_type=_LOOKUPENTRYREQUEST, output_type=_ENTRY, @@ -2233,166 +2963,166 @@ _descriptor.MethodDescriptor( name="CreateTagTemplate", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplate", - index=4, + index=9, containing_service=None, input_type=_CREATETAGTEMPLATEREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATE, serialized_options=_b( - '\202\323\344\223\002E"5/v1beta1/{parent=projects/*/locations/*}/tagTemplates:\014tag_template' + 
'\202\323\344\223\002E"5/v1beta1/{parent=projects/*/locations/*}/tagTemplates:\014tag_template\332A#parent,tag_template_id,tag_template' ), ), _descriptor.MethodDescriptor( name="GetTagTemplate", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.GetTagTemplate", - index=5, + index=10, containing_service=None, input_type=_GETTAGTEMPLATEREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATE, serialized_options=_b( - "\202\323\344\223\0027\0225/v1beta1/{name=projects/*/locations/*/tagTemplates/*}" + "\202\323\344\223\0027\0225/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\332A\004name" ), ), _descriptor.MethodDescriptor( name="UpdateTagTemplate", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplate", - index=6, + index=11, containing_service=None, input_type=_UPDATETAGTEMPLATEREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATE, serialized_options=_b( - "\202\323\344\223\002R2B/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}:\014tag_template" + "\202\323\344\223\002R2B/v1beta1/{tag_template.name=projects/*/locations/*/tagTemplates/*}:\014tag_template\332A\014tag_template\332A\030tag_template,update_mask" ), ), _descriptor.MethodDescriptor( name="DeleteTagTemplate", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplate", - index=7, + index=12, containing_service=None, input_type=_DELETETAGTEMPLATEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\0027*5/v1beta1/{name=projects/*/locations/*/tagTemplates/*}" + "\202\323\344\223\0027*5/v1beta1/{name=projects/*/locations/*/tagTemplates/*}\332A\nname,force" ), ), _descriptor.MethodDescriptor( name="CreateTagTemplateField", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateTagTemplateField", - index=8, + index=13, containing_service=None, input_type=_CREATETAGTEMPLATEFIELDREQUEST, 
output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATEFIELD, serialized_options=_b( - '\202\323\344\223\002T">/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields:\022tag_template_field' + '\202\323\344\223\002T">/v1beta1/{parent=projects/*/locations/*/tagTemplates/*}/fields:\022tag_template_field\332A/parent,tag_template_field_id,tag_template_field' ), ), _descriptor.MethodDescriptor( name="UpdateTagTemplateField", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTagTemplateField", - index=9, + index=14, containing_service=None, input_type=_UPDATETAGTEMPLATEFIELDREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATEFIELD, serialized_options=_b( - "\202\323\344\223\002T2>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:\022tag_template_field" + "\202\323\344\223\002T2>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:\022tag_template_field\332A\027name,tag_template_field\332A#name,tag_template_field,update_mask" ), ), _descriptor.MethodDescriptor( name="RenameTagTemplateField", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.RenameTagTemplateField", - index=10, + index=15, containing_service=None, input_type=_RENAMETAGTEMPLATEFIELDREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAGTEMPLATEFIELD, serialized_options=_b( - '\202\323\344\223\002J"E/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename:\001*' + '\202\323\344\223\002J"E/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}:rename:\001*\332A\036name,new_tag_template_field_id' ), ), _descriptor.MethodDescriptor( name="DeleteTagTemplateField", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTagTemplateField", - index=11, + index=16, containing_service=None, input_type=_DELETETAGTEMPLATEFIELDREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - 
"\202\323\344\223\002@*>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}" + "\202\323\344\223\002@*>/v1beta1/{name=projects/*/locations/*/tagTemplates/*/fields/*}\332A\nname,force" ), ), _descriptor.MethodDescriptor( name="CreateTag", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.CreateTag", - index=12, + index=17, containing_service=None, input_type=_CREATETAGREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAG, serialized_options=_b( - '\202\323\344\223\002L"E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags:\003tag' + '\202\323\344\223\002L"E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags:\003tag\332A\nparent,tag' ), ), _descriptor.MethodDescriptor( name="UpdateTag", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.UpdateTag", - index=13, + index=18, containing_service=None, input_type=_UPDATETAGREQUEST, output_type=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_tags__pb2._TAG, serialized_options=_b( - "\202\323\344\223\002P2I/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}:\003tag" + "\202\323\344\223\002P2I/v1beta1/{tag.name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}:\003tag\332A\003tag\332A\017tag,update_mask" ), ), _descriptor.MethodDescriptor( name="DeleteTag", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.DeleteTag", - index=14, + index=19, containing_service=None, input_type=_DELETETAGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002G*E/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}" + "\202\323\344\223\002G*E/v1beta1/{name=projects/*/locations/*/entryGroups/*/entries/*/tags/*}\332A\004name" ), ), _descriptor.MethodDescriptor( name="ListTags", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.ListTags", - index=15, + index=20, containing_service=None, input_type=_LISTTAGSREQUEST, 
output_type=_LISTTAGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002G\022E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags" + "\202\323\344\223\002G\022E/v1beta1/{parent=projects/*/locations/*/entryGroups/*/entries/*}/tags\332A\006parent" ), ), _descriptor.MethodDescriptor( name="SetIamPolicy", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.SetIamPolicy", - index=16, + index=21, containing_service=None, input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST, output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, serialized_options=_b( - '\202\323\344\223\002K"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy:\001*' + '\202\323\344\223\002\355\001"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:setIamPolicy:\001*ZJ"E/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:setIamPolicy:\001*ZT"O/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:setIamPolicy:\001*' ), ), _descriptor.MethodDescriptor( name="GetIamPolicy", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.GetIamPolicy", - index=17, + index=22, containing_service=None, input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST, output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, serialized_options=_b( - '\202\323\344\223\002K"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy:\001*' + '\202\323\344\223\002\355\001"F/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:getIamPolicy:\001*ZJ"E/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:getIamPolicy:\001*ZT"O/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:getIamPolicy:\001*' ), ), _descriptor.MethodDescriptor( name="TestIamPermissions", full_name="google.cloud.datacatalog.v1beta1.DataCatalog.TestIamPermissions", - index=18, + index=23, containing_service=None, input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST, 
output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE, serialized_options=_b( - '\202\323\344\223\002Q"L/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions:\001*' + '\202\323\344\223\002\377\001"L/v1beta1/{resource=projects/*/locations/*/tagTemplates/*}:testIamPermissions:\001*ZP"K/v1beta1/{resource=projects/*/locations/*/entryGroups/*}:testIamPermissions:\001*ZZ"U/v1beta1/{resource=projects/*/locations/*/entryGroups/*/entries/*}:testIamPermissions:\001*' ), ), ], diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py index c9af06b41c0c..3364f7f20fb4 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/datacatalog_pb2_grpc.py @@ -28,11 +28,36 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.SearchCatalogRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.SearchCatalogResponse.FromString, ) + self.CreateEntryGroup = channel.unary_unary( + "/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntryGroup", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.CreateEntryGroupRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.EntryGroup.FromString, + ) + self.GetEntryGroup = channel.unary_unary( + "/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntryGroup", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.GetEntryGroupRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.EntryGroup.FromString, + ) + self.DeleteEntryGroup = channel.unary_unary( + 
"/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntryGroup", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.DeleteEntryGroupRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.CreateEntry = channel.unary_unary( + "/google.cloud.datacatalog.v1beta1.DataCatalog/CreateEntry", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.CreateEntryRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.FromString, + ) self.UpdateEntry = channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/UpdateEntry", request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.UpdateEntryRequest.SerializeToString, response_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.FromString, ) + self.DeleteEntry = channel.unary_unary( + "/google.cloud.datacatalog.v1beta1.DataCatalog/DeleteEntry", + request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.DeleteEntryRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) self.GetEntry = channel.unary_unary( "/google.cloud.datacatalog.v1beta1.DataCatalog/GetEntry", request_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.GetEntryRequest.SerializeToString, @@ -132,7 +157,7 @@ def SearchCatalog(self, request, context): This is a custom method (https://cloud.google.com/apis/design/custom_methods) and does not return the complete resource, only the resource identifier and high level - fields. Clients can subsequentally call Get methods. + fields. Clients can subsequentally call `Get` methods. Note that searches do not have full recall. 
There may be results that match your query but are not returned, even in subsequent pages of results. These @@ -146,8 +171,65 @@ def SearchCatalog(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") + def CreateEntryGroup(self, request, context): + """Alpha feature. + Creates an EntryGroup. + The user should enable the Data Catalog API in the project identified by + the `parent` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetEntryGroup(self, request, context): + """Alpha feature. + Gets an EntryGroup. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteEntryGroup(self, request, context): + """Alpha feature. + Deletes an EntryGroup. Only entry groups that do not contain entries can be + deleted. The user should enable the Data Catalog API in the project + identified by the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateEntry(self, request, context): + """Alpha feature. + Creates an entry. Currently only entries of 'FILESET' type can be created. + The user should enable the Data Catalog API in the project identified by + the `parent` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + def UpdateEntry(self, request, context): """Updates an existing entry. + The user should enable the Data Catalog API in the project identified by + the `entry.name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteEntry(self, request, context): + """Alpha feature. + Deletes an existing entry. Only entries created through + [CreateEntry][google.cloud.datacatalog.v1beta1.DataCatalog.CreateEntry] + method can be deleted. + The user should enable the Data Catalog API in the project identified by + the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -170,7 +252,10 @@ def LookupEntry(self, request, context): raise NotImplementedError("Method not implemented!") def CreateTagTemplate(self, request, context): - """Creates a tag template. + """Creates a tag template. The user should enable the Data Catalog API in + the project identified by the `parent` parameter (see [Data Catalog + Resource Project](/data-catalog/docs/concepts/resource-project) for more + information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -187,6 +272,9 @@ def UpdateTagTemplate(self, request, context): """Updates a tag template. This method cannot be used to update the fields of a template. The tag template fields are represented as separate resources and should be updated using their own create/update/delete methods. 
+ The user should enable the Data Catalog API in the project identified by + the `tag_template.name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -194,13 +282,20 @@ def UpdateTagTemplate(self, request, context): def DeleteTagTemplate(self, request, context): """Deletes a tag template and all tags using the template. + The user should enable the Data Catalog API in the project identified by + the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def CreateTagTemplateField(self, request, context): - """Creates a field in a tag template. + """Creates a field in a tag template. The user should enable the Data Catalog + API in the project identified by the `parent` parameter (see + [Data Catalog Resource + Project](/data-catalog/docs/concepts/resource-project) for more + information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -208,14 +303,19 @@ def CreateTagTemplateField(self, request, context): def UpdateTagTemplateField(self, request, context): """Updates a field in a tag template. This method cannot be used to update the - field type. + field type. The user should enable the Data Catalog API in the project + identified by the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def RenameTagTemplateField(self, request, context): - """Renames a field in a tag template. 
+ """Renames a field in a tag template. The user should enable the Data Catalog + API in the project identified by the `name` parameter (see [Data Catalog + Resource Project](/data-catalog/docs/concepts/resource-project) for more + information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -223,6 +323,9 @@ def RenameTagTemplateField(self, request, context): def DeleteTagTemplateField(self, request, context): """Deletes a field in a tag template and all uses of that field. + The user should enable the Data Catalog API in the project identified by + the `name` parameter (see [Data Catalog Resource Project] + (/data-catalog/docs/concepts/resource-project) for more information). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -230,6 +333,12 @@ def DeleteTagTemplateField(self, request, context): def CreateTag(self, request, context): """Creates a tag on an [Entry][google.cloud.datacatalog.v1beta1.Entry]. + Note: The project identified by the `parent` parameter for the + [tag](/data-catalog/docs/reference/rest/v1beta1/projects.locations.entryGroups.entries.tags/create#path-parameters) + and the + [tag + template](/data-catalog/docs/reference/rest/v1beta1/projects.locations.tagTemplates/create#path-parameters) + used to create the tag must be from the same organization. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -261,12 +370,17 @@ def SetIamPolicy(self, request, context): policy. Supported resources are: - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - `datacatalog.tagTemplates.setIamPolicy` to set policies on tag templates. 
+ - `datacatalog.tagTemplates.setIamPolicy` to set policies on tag + templates. + - `datacatalog.entries.setIamPolicy` to set policies on entries. + - `datacatalog.entryGroups.setIamPolicy` to set policies on entry groups. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -279,12 +393,17 @@ def GetIamPolicy(self, request, context): Supported resources are: - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. Callers must have following Google IAM permission - `datacatalog.tagTemplates.getIamPolicy` to get policies on tag templates. + - `datacatalog.tagTemplates.getIamPolicy` to get policies on tag + templates. + - `datacatalog.entries.getIamPolicy` to get policies on entries. + - `datacatalog.entryGroups.getIamPolicy` to get policies on entry groups. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -295,8 +414,10 @@ def TestIamPermissions(self, request, context): If the resource does not exist, an empty set of permissions is returned (We don't return a `NOT_FOUND` error). - Supported resource are: - - tag templates. + Supported resources are: + - Tag templates. + - Entries. + - Entry groups. Note, this method cannot be used to manage policies for BigQuery, Cloud Pub/Sub and any external Google Cloud Platform resources synced to Cloud Data Catalog. 
@@ -316,11 +437,36 @@ def add_DataCatalogServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.SearchCatalogRequest.FromString, response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.SearchCatalogResponse.SerializeToString, ), + "CreateEntryGroup": grpc.unary_unary_rpc_method_handler( + servicer.CreateEntryGroup, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.CreateEntryGroupRequest.FromString, + response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.EntryGroup.SerializeToString, + ), + "GetEntryGroup": grpc.unary_unary_rpc_method_handler( + servicer.GetEntryGroup, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.GetEntryGroupRequest.FromString, + response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.EntryGroup.SerializeToString, + ), + "DeleteEntryGroup": grpc.unary_unary_rpc_method_handler( + servicer.DeleteEntryGroup, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.DeleteEntryGroupRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "CreateEntry": grpc.unary_unary_rpc_method_handler( + servicer.CreateEntry, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.CreateEntryRequest.FromString, + response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.SerializeToString, + ), "UpdateEntry": grpc.unary_unary_rpc_method_handler( servicer.UpdateEntry, request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.UpdateEntryRequest.FromString, response_serializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.Entry.SerializeToString, ), + 
"DeleteEntry": grpc.unary_unary_rpc_method_handler( + servicer.DeleteEntry, + request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.DeleteEntryRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), "GetEntry": grpc.unary_unary_rpc_method_handler( servicer.GetEntry, request_deserializer=google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_datacatalog__pb2.GetEntryRequest.FromString, diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto new file mode 100644 index 000000000000..e7397d054365 --- /dev/null +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto @@ -0,0 +1,59 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.datacatalog.v1beta1; + +import "google/api/field_behavior.proto"; +import "google/cloud/datacatalog/v1beta1/timestamps.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog"; +option java_multiple_files = true; +option java_package = "com.google.cloud.datacatalog"; + +// Describes a Cloud Storage fileset entry. +message GcsFilesetSpec { + // Required. Patterns to identify a set of files in Google Cloud Storage. 
+ // + // Examples of valid file_patterns: + // + // * `gs://bucket_name/*`: matches all files in `bucket_name` + // * `gs://bucket_name/file*`: matches files prefixed by `file` in + // `bucket_name` + // * `gs://bucket_name/a/*/b`: matches all files in `bucket_name` that match + // `a/*/b` pattern, such as `a/c/b`, `a/d/b` + // * `gs://another_bucket/a.txt`: matches `gs://another_bucket/a.txt` + repeated string file_patterns = 1 [(google.api.field_behavior) = REQUIRED]; + + // Output only. Sample files contained in this fileset, not all files + // contained in this fileset are represented here. + repeated GcsFileSpec sample_gcs_file_specs = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Specifications of a single file in GCS. +message GcsFileSpec { + // Required. The full file path. Example: `gs://bucket_name/a/b.txt`. + string file_path = 1 [(google.api.field_behavior) = REQUIRED]; + + // Output only. Timestamps about the GCS file. + SystemTimestamps gcs_timestamps = 2 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The size of the file, in bytes. + int64 size_bytes = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2.py new file mode 100644 index 000000000000..94aee77c5677 --- /dev/null +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2.py @@ -0,0 +1,240 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.cloud.datacatalog_v1beta1.proto import ( + timestamps_pb2 as google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2, +) + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto", + package="google.cloud.datacatalog.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" + ), + serialized_pb=_b( + '\n=google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x37google/cloud/datacatalog_v1beta1/proto/timestamps.proto"\x7f\n\x0eGcsFilesetSpec\x12\x1a\n\rfile_patterns\x18\x01 \x03(\tB\x03\xe0\x41\x02\x12Q\n\x15sample_gcs_file_specs\x18\x02 \x03(\x0b\x32-.google.cloud.datacatalog.v1beta1.GcsFileSpecB\x03\xe0\x41\x03"\x8f\x01\n\x0bGcsFileSpec\x12\x16\n\tfile_path\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12O\n\x0egcs_timestamps\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.SystemTimestampsB\x03\xe0\x41\x03\x12\x17\n\nsize_bytes\x18\x04 \x01(\x03\x42\x03\xe0\x41\x03\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + ), + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + 
google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2.DESCRIPTOR, + ], +) + + +_GCSFILESETSPEC = _descriptor.Descriptor( + name="GcsFilesetSpec", + full_name="google.cloud.datacatalog.v1beta1.GcsFilesetSpec", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="file_patterns", + full_name="google.cloud.datacatalog.v1beta1.GcsFilesetSpec.file_patterns", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="sample_gcs_file_specs", + full_name="google.cloud.datacatalog.v1beta1.GcsFilesetSpec.sample_gcs_file_specs", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=189, + serialized_end=316, +) + + +_GCSFILESPEC = _descriptor.Descriptor( + name="GcsFileSpec", + full_name="google.cloud.datacatalog.v1beta1.GcsFileSpec", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="file_path", + full_name="google.cloud.datacatalog.v1beta1.GcsFileSpec.file_path", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + 
name="gcs_timestamps", + full_name="google.cloud.datacatalog.v1beta1.GcsFileSpec.gcs_timestamps", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="size_bytes", + full_name="google.cloud.datacatalog.v1beta1.GcsFileSpec.size_bytes", + index=2, + number=4, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=319, + serialized_end=462, +) + +_GCSFILESETSPEC.fields_by_name["sample_gcs_file_specs"].message_type = _GCSFILESPEC +_GCSFILESPEC.fields_by_name[ + "gcs_timestamps" +].message_type = ( + google_dot_cloud_dot_datacatalog__v1beta1_dot_proto_dot_timestamps__pb2._SYSTEMTIMESTAMPS +) +DESCRIPTOR.message_types_by_name["GcsFilesetSpec"] = _GCSFILESETSPEC +DESCRIPTOR.message_types_by_name["GcsFileSpec"] = _GCSFILESPEC +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +GcsFilesetSpec = _reflection.GeneratedProtocolMessageType( + "GcsFilesetSpec", + (_message.Message,), + dict( + DESCRIPTOR=_GCSFILESETSPEC, + __module__="google.cloud.datacatalog_v1beta1.proto.gcs_fileset_spec_pb2", + __doc__="""Describes a Cloud Storage fileset entry. + + + Attributes: + file_patterns: + Required. Patterns to identify a set of files in Google Cloud + Storage. 
Examples of valid file\_patterns: - + ``gs://bucket_name/*``: matches all files in ``bucket_name`` - + ``gs://bucket_name/file*``: matches files prefixed by ``file`` + in ``bucket_name`` - ``gs://bucket_name/a/*/b``: matches + all files in ``bucket_name`` that match ``a/*/b`` pattern, + such as ``a/c/b``, ``a/d/b`` - ``gs://another_bucket/a.txt``: + matches ``gs://another_bucket/a.txt`` + sample_gcs_file_specs: + Output only. Sample files contained in this fileset, not all + files contained in this fileset are represented here. + """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GcsFilesetSpec) + ), +) +_sym_db.RegisterMessage(GcsFilesetSpec) + +GcsFileSpec = _reflection.GeneratedProtocolMessageType( + "GcsFileSpec", + (_message.Message,), + dict( + DESCRIPTOR=_GCSFILESPEC, + __module__="google.cloud.datacatalog_v1beta1.proto.gcs_fileset_spec_pb2", + __doc__="""Specifications of a single file in GCS. + + + Attributes: + file_path: + Required. The full file path. Example: + ``gs://bucket_name/a/b.txt``. + gcs_timestamps: + Output only. Timestamps about the GCS file. + size_bytes: + Output only. The size of the file, in bytes. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.GcsFileSpec) + ), +) +_sym_db.RegisterMessage(GcsFileSpec) + + +DESCRIPTOR._options = None +_GCSFILESETSPEC.fields_by_name["file_patterns"]._options = None +_GCSFILESETSPEC.fields_by_name["sample_gcs_file_specs"]._options = None +_GCSFILESPEC.fields_by_name["file_path"]._options = None +_GCSFILESPEC.fields_by_name["gcs_timestamps"]._options = None +_GCSFILESPEC.fields_by_name["size_bytes"]._options = None +# @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2_grpc.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/gcs_fileset_spec_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema.proto index 839ef1d8ca50..aca588b4503b 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema.proto @@ -17,6 +17,8 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; +import "google/api/field_behavior.proto"; + option cc_enable_arenas = true; option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog"; option java_multiple_files = true; @@ -26,27 +28,27 @@ option java_package = "com.google.cloud.datacatalog"; message Schema { // Required. Schema of columns. A maximum of 10,000 columns and sub-columns // can be specified. - repeated ColumnSchema columns = 2; + repeated ColumnSchema columns = 2 [(google.api.field_behavior) = REQUIRED]; } // Representation of a column within a schema. Columns could be nested inside // other columns. 
message ColumnSchema { // Required. Name of the column. - string column = 6; + string column = 6 [(google.api.field_behavior) = REQUIRED]; // Required. Type of the column. - string type = 1; + string type = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Description of the column. Default value is an empty string. - string description = 2; + string description = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. A column's mode indicates whether the values in this column are - // required, nullable, etc. Only 'NULLABLE', 'REQUIRED' and 'REPEATED' are - // supported. Default mode is 'NULLABLE'. - string mode = 3; + // required, nullable, etc. Only `NULLABLE`, `REQUIRED` and `REPEATED` are + // supported. Default mode is `NULLABLE`. + string mode = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Schema of sub-columns. A column can have zero or more // sub-columns. - repeated ColumnSchema subcolumns = 7; + repeated ColumnSchema subcolumns = 7 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py index e7643ae11df4..b5ac8e07cc8c 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/schema_pb2.py @@ -15,6 +15,9 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 + + DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/datacatalog_v1beta1/proto/schema.proto", package="google.cloud.datacatalog.v1beta1", @@ -23,8 +26,9 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n3google/cloud/datacatalog_v1beta1/proto/schema.proto\x12 google.cloud.datacatalog.v1beta1"I\n\x06Schema\x12?\n\x07\x63olumns\x18\x02 
\x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchema"\x93\x01\n\x0c\x43olumnSchema\x12\x0e\n\x06\x63olumn\x18\x06 \x01(\t\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04mode\x18\x03 \x01(\t\x12\x42\n\nsubcolumns\x18\x07 \x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchemaBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n3google/cloud/datacatalog_v1beta1/proto/schema.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto"N\n\x06Schema\x12\x44\n\x07\x63olumns\x18\x02 \x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchemaB\x03\xe0\x41\x02"\xac\x01\n\x0c\x43olumnSchema\x12\x13\n\x06\x63olumn\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04type\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x64\x65scription\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x04mode\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12G\n\nsubcolumns\x18\x07 \x03(\x0b\x32..google.cloud.datacatalog.v1beta1.ColumnSchemaB\x03\xe0\x41\x01\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), + dependencies=[google_dot_api_dot_field__behavior__pb2.DESCRIPTOR], ) @@ -50,7 +54,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -62,8 +66,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=89, - serialized_end=162, + serialized_start=122, + serialized_end=200, ) @@ -89,7 +93,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -107,7 +111,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -125,7 +129,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -143,7 +147,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -161,7 +165,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -173,8 +177,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=165, - serialized_end=312, + serialized_start=203, + serialized_end=375, ) _SCHEMA.fields_by_name["columns"].message_type = _COLUMNSCHEMA @@ -222,9 +226,9 @@ string. mode: Optional. A column's mode indicates whether the values in this - column are required, nullable, etc. Only 'NULLABLE', - 'REQUIRED' and 'REPEATED' are supported. Default mode is - 'NULLABLE'. + column are required, nullable, etc. Only ``NULLABLE``, + ``REQUIRED`` and ``REPEATED`` are supported. Default mode is + ``NULLABLE``. subcolumns: Optional. Schema of sub-columns. A column can have zero or more sub-columns. 
@@ -236,4 +240,10 @@ DESCRIPTOR._options = None +_SCHEMA.fields_by_name["columns"]._options = None +_COLUMNSCHEMA.fields_by_name["column"]._options = None +_COLUMNSCHEMA.fields_by_name["type"]._options = None +_COLUMNSCHEMA.fields_by_name["description"]._options = None +_COLUMNSCHEMA.fields_by_name["mode"]._options = None +_COLUMNSCHEMA.fields_by_name["subcolumns"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/search.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/search.proto index 2a31dd94543a..372c1573c3db 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/search.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/search.proto @@ -17,7 +17,7 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option cc_enable_arenas = true; @@ -34,20 +34,22 @@ message SearchCatalogResult { // Sub-type of the search result. This is a dot-delimited description of the // resource's full type, and is the same as the value callers would provide in - // the "type" search facet. Examples: "entry.table", "entry.dataStream", - // "tagTemplate" + // the "type" search facet. Examples: `entry.table`, `entry.dataStream`, + // `tagTemplate`. string search_result_subtype = 2; // The relative resource name of the resource in URL format. // Examples: - // "projects/{project_id}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}". - // "projects/{project_id}/tagTemplates/{tag_template_id}". + // + // * `projects/{project_id}/locations/{location_id}/entryGroups/{entry_group_id}/entries/{entry_id}` + // * `projects/{project_id}/tagTemplates/{tag_template_id}` string relative_resource_name = 3; // The full name of the cloud resource the entry belongs to. 
See: - // https://cloud.google.com/apis/design/resource_names#full_resource_name + // https://cloud.google.com/apis/design/resource_names#full_resource_name. // Example: - // "//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId". + // + // * `//bigquery.googleapis.com/projects/projectId/datasets/datasetId/tables/tableId` string linked_resource = 4; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/search_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/search_pb2.py index 5c7ea4466f44..480288089982 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/search_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/search_pb2.py @@ -16,7 +16,7 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -28,10 +28,10 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n3google/cloud/datacatalog_v1beta1/proto/search.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xbd\x01\n\x13SearchCatalogResult\x12N\n\x12search_result_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.datacatalog.v1beta1.SearchResultType\x12\x1d\n\x15search_result_subtype\x18\x02 \x01(\t\x12\x1e\n\x16relative_resource_name\x18\x03 \x01(\t\x12\x17\n\x0flinked_resource\x18\x04 \x01(\t*d\n\x10SearchResultType\x12"\n\x1eSEARCH_RESULT_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x45NTRY\x10\x01\x12\x10\n\x0cTAG_TEMPLATE\x10\x02\x12\x0f\n\x0b\x45NTRY_GROUP\x10\x03\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + 
'\n3google/cloud/datacatalog_v1beta1/proto/search.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xbd\x01\n\x13SearchCatalogResult\x12N\n\x12search_result_type\x18\x01 \x01(\x0e\x32\x32.google.cloud.datacatalog.v1beta1.SearchResultType\x12\x1d\n\x15search_result_subtype\x18\x02 \x01(\t\x12\x1e\n\x16relative_resource_name\x18\x03 \x01(\t\x12\x17\n\x0flinked_resource\x18\x04 \x01(\t*d\n\x10SearchResultType\x12"\n\x1eSEARCH_RESULT_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x45NTRY\x10\x01\x12\x10\n\x0cTAG_TEMPLATE\x10\x02\x12\x0f\n\x0b\x45NTRY_GROUP\x10\x03\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], ) @@ -61,8 +61,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=344, - serialized_end=444, + serialized_start=347, + serialized_end=447, ) _sym_db.RegisterEnumDescriptor(_SEARCHRESULTTYPE) @@ -161,8 +161,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=153, - serialized_end=342, + serialized_start=156, + serialized_end=345, ) _SEARCHCATALOGRESULT.fields_by_name["search_result_type"].enum_type = _SEARCHRESULTTYPE @@ -188,17 +188,18 @@ Sub-type of the search result. This is a dot-delimited description of the resource's full type, and is the same as the value callers would provide in the "type" search facet. - Examples: "entry.table", "entry.dataStream", "tagTemplate" + Examples: ``entry.table``, ``entry.dataStream``, + ``tagTemplate``. relative_resource_name: The relative resource name of the resource in URL format. - Examples: "projects/{project\_id}/locations/{location\_id}/ent - ryGroups/{entry\_group\_id}/entries/{entry\_id}". 
- "projects/{project\_id}/tagTemplates/{tag\_template\_id}". + Examples: - ``projects/{project_id}/locations/{location_id}/ + entryGroups/{entry_group_id}/entries/{entry_id}`` - + ``projects/{project_id}/tagTemplates/{tag_template_id}`` linked_resource: The full name of the cloud resource the entry belongs to. See: https://cloud.google.com/apis/design/resource\_names#full\_res - ource\_name Example: "//bigquery.googleapis.com/projects/proje - ctId/datasets/datasetId/tables/tableId". + ource\_name. Example: - ``//bigquery.googleapis.com/projects + /projectId/datasets/datasetId/tables/tableId`` """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.SearchCatalogResult) ), diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec.proto index 8e9547fea783..4f9fddaaf97b 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec.proto @@ -17,6 +17,9 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; + option cc_enable_arenas = true; option go_package = "google.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog"; option java_multiple_files = true; @@ -25,16 +28,17 @@ option java_package = "com.google.cloud.datacatalog"; // Describes a BigQuery table. message BigQueryTableSpec { // Output only. The table source type. - TableSourceType table_source_type = 1; + TableSourceType table_source_type = 1 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. oneof type_spec { // Table view specification. This field should only be populated if - // table_source_type is BIGQUERY_VIEW. + // `table_source_type` is `BIGQUERY_VIEW`. ViewSpec view_spec = 2; // Spec of a BigQuery table. This field should only be populated if - // table_source_type is BIGQUERY_TABLE. 
+ // `table_source_type` is `BIGQUERY_TABLE`. TableSpec table_spec = 3; } } @@ -54,35 +58,44 @@ enum TableSourceType { // Table view specification. message ViewSpec { // Output only. The query that defines the table view. - string view_query = 1; + string view_query = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Normal BigQuery table spec. message TableSpec { - // Output only. If the table is a dated shard, i.e. with name pattern - // [prefix]YYYYMMDD, grouped_entry is the Data Catalog resource name of the - // date sharded grouped entry, e.g. - // projects/{project_id}/locations/{location}/entrygroups/{entry_group_id} - // /entries/{entry_id}. - // Otherwise, grouped_entry will be empty. - string grouped_entry = 1; + // Output only. If the table is a dated shard, i.e., with name pattern + // `[prefix]YYYYMMDD`, `grouped_entry` is the Data Catalog resource name of + // the date sharded grouped entry, for example, + // `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`. + // Otherwise, `grouped_entry` is empty. + string grouped_entry = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Entry" + } + ]; } -// Spec for a group of BigQuery tables with name pattern [prefix]YYYYMMDD. +// Spec for a group of BigQuery tables with name pattern `[prefix]YYYYMMDD`. // Context: // https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning_versus_sharding message BigQueryDateShardedSpec { // Output only. The Data Catalog resource name of the dataset entry the - // current table belongs to, e.g. - // projects/{project_id}/locations/{location}/entrygroups/{entry_group_id} - // /entries/{entry_id} - string dataset = 1; + // current table belongs to, for example, + // `projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}`. 
+ string dataset = 1 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.resource_reference) = { + type: "datacatalog.googleapis.com/Entry" + } + ]; // Output only. The table name prefix of the shards. The name of any given - // shard is [table_prefix]YYYYMMDD, e.g. for shard MyTable20180101, the - // table_prefix is "MyTable" - string table_prefix = 2; + // shard is + // `[table_prefix]YYYYMMDD`, for example, for shard `MyTable20180101`, the + // `table_prefix` is `MyTable`. + string table_prefix = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Total number of shards. - int64 shard_count = 3; + int64 shard_count = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py index c2fcf4ba9ea1..95d06ed5d360 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/table_spec_pb2.py @@ -16,6 +16,10 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 + + DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/datacatalog_v1beta1/proto/table_spec.proto", package="google.cloud.datacatalog.v1beta1", @@ -24,8 +28,12 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n7google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x12 google.cloud.datacatalog.v1beta1"\xf2\x01\n\x11\x42igQueryTableSpec\x12L\n\x11table_source_type\x18\x01 \x01(\x0e\x32\x31.google.cloud.datacatalog.v1beta1.TableSourceType\x12?\n\tview_spec\x18\x02 \x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.ViewSpecH\x00\x12\x41\n\ntable_spec\x18\x03 
\x01(\x0b\x32+.google.cloud.datacatalog.v1beta1.TableSpecH\x00\x42\x0b\n\ttype_spec"\x1e\n\x08ViewSpec\x12\x12\n\nview_query\x18\x01 \x01(\t""\n\tTableSpec\x12\x15\n\rgrouped_entry\x18\x01 \x01(\t"U\n\x17\x42igQueryDateShardedSpec\x12\x0f\n\x07\x64\x61taset\x18\x01 \x01(\t\x12\x14\n\x0ctable_prefix\x18\x02 \x01(\t\x12\x13\n\x0bshard_count\x18\x03 \x01(\x03*[\n\x0fTableSourceType\x12!\n\x1dTABLE_SOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rBIGQUERY_VIEW\x10\x02\x12\x12\n\x0e\x42IGQUERY_TABLE\x10\x05\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n7google/cloud/datacatalog_v1beta1/proto/table_spec.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto"\xf7\x01\n\x11\x42igQueryTableSpec\x12Q\n\x11table_source_type\x18\x01 \x01(\x0e\x32\x31.google.cloud.datacatalog.v1beta1.TableSourceTypeB\x03\xe0\x41\x03\x12?\n\tview_spec\x18\x02 \x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.ViewSpecH\x00\x12\x41\n\ntable_spec\x18\x03 \x01(\x0b\x32+.google.cloud.datacatalog.v1beta1.TableSpecH\x00\x42\x0b\n\ttype_spec"#\n\x08ViewSpec\x12\x17\n\nview_query\x18\x01 \x01(\tB\x03\xe0\x41\x03"L\n\tTableSpec\x12?\n\rgrouped_entry\x18\x01 \x01(\tB(\xe0\x41\x03\xfa\x41"\n datacatalog.googleapis.com/Entry"\x89\x01\n\x17\x42igQueryDateShardedSpec\x12\x39\n\x07\x64\x61taset\x18\x01 \x01(\tB(\xe0\x41\x03\xfa\x41"\n datacatalog.googleapis.com/Entry\x12\x19\n\x0ctable_prefix\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x18\n\x0bshard_count\x18\x03 \x01(\x03\x42\x03\xe0\x41\x03*[\n\x0fTableSourceType\x12!\n\x1dTABLE_SOURCE_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rBIGQUERY_VIEW\x10\x02\x12\x12\n\x0e\x42IGQUERY_TABLE\x10\x05\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + 
google_dot_api_dot_resource__pb2.DESCRIPTOR, + ], ) _TABLESOURCETYPE = _descriptor.EnumDescriptor( @@ -50,8 +58,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=493, - serialized_end=584, + serialized_start=658, + serialized_end=749, ) _sym_db.RegisterEnumDescriptor(_TABLESOURCETYPE) @@ -83,7 +91,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -139,8 +147,8 @@ fields=[], ) ], - serialized_start=94, - serialized_end=336, + serialized_start=154, + serialized_end=401, ) @@ -166,7 +174,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ) ], @@ -178,8 +186,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=338, - serialized_end=368, + serialized_start=403, + serialized_end=438, ) @@ -205,7 +213,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\003\372A"\n datacatalog.googleapis.com/Entry'), file=DESCRIPTOR, ) ], @@ -217,8 +225,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=370, - serialized_end=404, + serialized_start=440, + serialized_end=516, ) @@ -244,7 +252,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\340A\003\372A"\n datacatalog.googleapis.com/Entry'), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -262,7 +270,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -280,7 +288,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -292,8 +300,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=406, - serialized_end=491, + serialized_start=519, + serialized_end=656, ) _BIGQUERYTABLESPEC.fields_by_name["table_source_type"].enum_type = _TABLESOURCETYPE @@ -334,10 +342,10 @@ Output only. view_spec: Table view specification. This field should only be populated - if table\_source\_type is BIGQUERY\_VIEW. + if ``table_source_type`` is ``BIGQUERY_VIEW``. table_spec: Spec of a BigQuery table. This field should only be populated - if table\_source\_type is BIGQUERY\_TABLE. + if ``table_source_type`` is ``BIGQUERY_TABLE``. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.BigQueryTableSpec) ), @@ -373,12 +381,12 @@ Attributes: grouped_entry: - Output only. If the table is a dated shard, i.e. with name - pattern [prefix]YYYYMMDD, grouped\_entry is the Data Catalog - resource name of the date sharded grouped entry, e.g. projects - /{project\_id}/locations/{location}/entrygroups/{entry\_group\ - _id} /entries/{entry\_id}. Otherwise, grouped\_entry will be - empty. + Output only. If the table is a dated shard, i.e., with name + pattern ``[prefix]YYYYMMDD``, ``grouped_entry`` is the Data + Catalog resource name of the date sharded grouped entry, for + example, ``projects/{project_id}/locations/{location}/entrygro + ups/{entry_group_id}/entries/{entry_id}``. Otherwise, + ``grouped_entry`` is empty. """, # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.TableSpec) ), @@ -391,21 +399,22 @@ dict( DESCRIPTOR=_BIGQUERYDATESHARDEDSPEC, __module__="google.cloud.datacatalog_v1beta1.proto.table_spec_pb2", - __doc__="""Spec for a group of BigQuery tables with name pattern [prefix]YYYYMMDD. - Context: + __doc__="""Spec for a group of BigQuery tables with name pattern + ``[prefix]YYYYMMDD``. Context: https://cloud.google.com/bigquery/docs/partitioned-tables#partitioning\_versus\_sharding Attributes: dataset: Output only. 
The Data Catalog resource name of the dataset - entry the current table belongs to, e.g. projects/{project\_id - }/locations/{location}/entrygroups/{entry\_group\_id} - /entries/{entry\_id} + entry the current table belongs to, for example, ``projects/{p + roject_id}/locations/{location}/entrygroups/{entry_group_id}/e + ntries/{entry_id}``. table_prefix: Output only. The table name prefix of the shards. The name of - any given shard is [table\_prefix]YYYYMMDD, e.g. for shard - MyTable20180101, the table\_prefix is "MyTable" + any given shard is ``[table_prefix]YYYYMMDD``, for example, + for shard ``MyTable20180101``, the ``table_prefix`` is + ``MyTable``. shard_count: Output only. Total number of shards. """, @@ -416,4 +425,10 @@ DESCRIPTOR._options = None +_BIGQUERYTABLESPEC.fields_by_name["table_source_type"]._options = None +_VIEWSPEC.fields_by_name["view_query"]._options = None +_TABLESPEC.fields_by_name["grouped_entry"]._options = None +_BIGQUERYDATESHARDEDSPEC.fields_by_name["dataset"]._options = None +_BIGQUERYDATESHARDEDSPEC.fields_by_name["table_prefix"]._options = None +_BIGQUERYDATESHARDEDSPEC.fields_by_name["shard_count"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags.proto index f01843c206e9..c2fc2da43467 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags.proto @@ -17,6 +17,8 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/protobuf/timestamp.proto"; option cc_enable_arenas = true; @@ -27,40 +29,49 @@ option java_package = "com.google.cloud.datacatalog"; // Tags are used to attach custom metadata to Data Catalog resources. Tags // conform to the specifications within their tag template. 
message Tag { - // Required when used in - // [UpdateTagRequest][google.cloud.datacatalog.v1beta1.UpdateTagRequest]. The - // resource name of the tag in URL format. For example, - // projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id}", - // where tag_id is a system-generated identifier. Note that this Tag may not - // actually be stored in the location in this name. + option (google.api.resource) = { + type: "datacatalog.googleapis.com/Tag" + pattern: "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}" + }; + + // The resource name of the tag in URL format. Example: + // + // * projects/{project_id}/locations/{location}/entrygroups/{entry_group_id}/entries/{entry_id}/tags/{tag_id} + // + // where `tag_id` is a system-generated identifier. + // Note that this Tag may not actually be stored in the location in this name. string name = 1; - // Required. The resource name of the tag template that this tag uses. For - // example, - // projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}. + // Required. The resource name of the tag template that this tag uses. + // Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + // // This field cannot be modified after creation. - string template = 2; + string template = 2 [(google.api.field_behavior) = REQUIRED]; // Output only. The display name of the tag template. - string template_display_name = 5; + string template_display_name = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. The scope within the parent resource that this tag is attached - // to. If not provided, the tag is attached to the parent resource itself. + // The scope within the parent resource that this tag is attached to. If not + // provided, the tag is attached to the parent resource itself. // Deleting the scope from the parent resource will delete all tags attached // to that scope. 
These fields cannot be updated after creation. oneof scope { // Resources like Entry can have schemas associated with them. This scope // allows users to attach tags to an individual column based on that schema. // - // For attaching a tag to a nested column, use '.' to separate the column - // names: "outer_column.inner_column". + // For attaching a tag to a nested column, use `.` to separate the column + // names. Example: + // + // * `outer_column.inner_column` string column = 4; } - // Required. This maps the id of a tag field to the value of & additional + // Required. This maps the ID of a tag field to the value of and additional // information about that field. Valid field IDs are defined by the tag's // template. A tag must have at least 1 field and at most 500 fields. - map fields = 3; + map fields = 3 [(google.api.field_behavior) = REQUIRED]; } // Contains the value and supporting information for a field within @@ -73,7 +84,7 @@ message TagField { } // Output only. The display name of this field. - string display_name = 1; + string display_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Required. The value of this field. oneof kind { @@ -95,41 +106,59 @@ message TagField { } } -// Tag templates defines the schema of the tags used to attach to Data Catalog +// A tag template defines the schema of the tags used to attach to Data Catalog // resources. It defines the mapping of accepted field names and types that can // be used within the tag. The tag template also controls the access to the tag. message TagTemplate { - // Required when used in - // [UpdateTagTemplateRequest][google.cloud.datacatalog.v1beta1.UpdateTagTemplateRequest]. - // The resource name of the tag template in URL format. For example, - // projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}. 
+ option (google.api.resource) = { + type: "datacatalog.googleapis.com/TagTemplate" + pattern: "projects/{project}/locations/{location}/tagTemplates/{tag_template}" + }; + + // The resource name of the tag template in URL format. Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id} + // // Note that this TagTemplate and its child resources may not actually be // stored in the location in this name. string name = 1; - // Optional. The display name for this template. Default value is an empty - // string. + // The display name for this template. Defaults to an empty string. string display_name = 2; - // Required. Map of tag template field ids to the settings for the field. + // Required. Map of tag template field IDs to the settings for the field. // This map is an exhaustive list of the allowed fields. This map must contain // at least one field and at most 500 fields. // // The keys to this map are tag template field IDs. Field IDs can contain // letters (both uppercase and lowercase), numbers (0-9) and underscores (_). - // Field IDs must be at least 1 character long and at most 64 characters long. - // Field IDs must start with a letter or underscore. - map fields = 3; + // Field IDs must be at least 1 character long and at most + // 64 characters long. Field IDs must start with a letter or underscore. + map fields = 3 + [(google.api.field_behavior) = REQUIRED]; } // The template for an individual field within a tag template. message TagTemplateField { - // Optional. The display name for this field. Default value is an empty - // string. + option (google.api.resource) = { + type: "datacatalog.googleapis.com/TagTemplateField" + pattern: "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}" + }; + + // Output only. The resource name of the tag template field in URL format. 
+ // Example: + // + // * projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field} + // + // Note that this TagTemplateField may not actually be stored in the location + // in this name. + string name = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The display name for this field. Defaults to an empty string. string display_name = 1; // Required. The type of value this tag field can contain. - FieldType type = 2; + FieldType type = 2 [(google.api.field_behavior) = REQUIRED]; } message FieldType { @@ -137,15 +166,15 @@ message FieldType { message EnumValue { // Required. The display name of the enum value. Must not be an empty // string. - string display_name = 1; + string display_name = 1 [(google.api.field_behavior) = REQUIRED]; } - // Required. The set of allowed values for this enum. This set must not be - // empty, the display names of the values in this set must not be empty and - // the display names of the values must be case-insensitively unique within - // this set. Currently, enum values can only be added to the list of allowed - // values. Deletion and renaming of enum values are not supported. Can have - // up to 500 allowed values. + // Required on create; optional on update. The set of allowed values for + // this enum. This set must not be empty, the display names of the values in + // this set must not be empty and the display names of the values must be + // case-insensitively unique within this set. Currently, enum values can + // only be added to the list of allowed values. Deletion and renaming of + // enum values are not supported. Can have up to 500 allowed values. 
repeated EnumValue allowed_values = 1; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags_pb2.py index c9d36922e79e..379a68564735 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/tags_pb2.py @@ -15,6 +15,8 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -26,9 +28,13 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n1google/cloud/datacatalog_v1beta1/proto/tags.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto"\xfd\x01\n\x03Tag\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08template\x18\x02 \x01(\t\x12\x1d\n\x15template_display_name\x18\x05 \x01(\t\x12\x10\n\x06\x63olumn\x18\x04 \x01(\tH\x00\x12\x41\n\x06\x66ields\x18\x03 \x03(\x0b\x32\x31.google.cloud.datacatalog.v1beta1.Tag.FieldsEntry\x1aY\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 \x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.TagField:\x02\x38\x01\x42\x07\n\x05scope"\x94\x02\n\x08TagField\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x16\n\x0c\x64ouble_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12\x35\n\x0ftimestamp_value\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12J\n\nenum_value\x18\x06 \x01(\x0b\x32\x34.google.cloud.datacatalog.v1beta1.TagField.EnumValueH\x00\x1a!\n\tEnumValue\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\tB\x06\n\x04kind"\xdf\x01\n\x0bTagTemplate\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 
\x01(\t\x12I\n\x06\x66ields\x18\x03 \x03(\x0b\x32\x39.google.cloud.datacatalog.v1beta1.TagTemplate.FieldsEntry\x1a\x61\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateField:\x02\x38\x01"c\n\x10TagTemplateField\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x39\n\x04type\x18\x02 \x01(\x0b\x32+.google.cloud.datacatalog.v1beta1.FieldType"\xa2\x03\n\tFieldType\x12S\n\x0eprimitive_type\x18\x01 \x01(\x0e\x32\x39.google.cloud.datacatalog.v1beta1.FieldType.PrimitiveTypeH\x00\x12I\n\tenum_type\x18\x02 \x01(\x0b\x32\x34.google.cloud.datacatalog.v1beta1.FieldType.EnumTypeH\x00\x1a\x85\x01\n\x08\x45numType\x12V\n\x0e\x61llowed_values\x18\x01 \x03(\x0b\x32>.google.cloud.datacatalog.v1beta1.FieldType.EnumType.EnumValue\x1a!\n\tEnumValue\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t"`\n\rPrimitiveType\x12\x1e\n\x1aPRIMITIVE_TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06\x44OUBLE\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x08\n\x04\x42OOL\x10\x03\x12\r\n\tTIMESTAMP\x10\x04\x42\x0b\n\ttype_declBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n1google/cloud/datacatalog_v1beta1/proto/tags.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x90\x03\n\x03Tag\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x15\n\x08template\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12"\n\x15template_display_name\x18\x05 \x01(\tB\x03\xe0\x41\x03\x12\x10\n\x06\x63olumn\x18\x04 \x01(\tH\x00\x12\x46\n\x06\x66ields\x18\x03 \x03(\x0b\x32\x31.google.cloud.datacatalog.v1beta1.Tag.FieldsEntryB\x03\xe0\x41\x02\x1aY\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\x05value\x18\x02 
\x01(\x0b\x32*.google.cloud.datacatalog.v1beta1.TagField:\x02\x38\x01:\x81\x01\xea\x41~\n\x1e\x64\x61tacatalog.googleapis.com/Tag\x12\\projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}B\x07\n\x05scope"\x99\x02\n\x08TagField\x12\x19\n\x0c\x64isplay_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x16\n\x0c\x64ouble_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12\x35\n\x0ftimestamp_value\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12J\n\nenum_value\x18\x06 \x01(\x0b\x32\x34.google.cloud.datacatalog.v1beta1.TagField.EnumValueH\x00\x1a!\n\tEnumValue\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\tB\x06\n\x04kind"\xd6\x02\n\x0bTagTemplate\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12N\n\x06\x66ields\x18\x03 \x03(\x0b\x32\x39.google.cloud.datacatalog.v1beta1.TagTemplate.FieldsEntryB\x03\xe0\x41\x02\x1a\x61\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0b\x32\x32.google.cloud.datacatalog.v1beta1.TagTemplateField:\x02\x38\x01:p\xea\x41m\n&datacatalog.googleapis.com/TagTemplate\x12\x43projects/{project}/locations/{location}/tagTemplates/{tag_template}"\x83\x02\n\x10TagTemplateField\x12\x11\n\x04name\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12>\n\x04type\x18\x02 \x01(\x0b\x32+.google.cloud.datacatalog.v1beta1.FieldTypeB\x03\xe0\x41\x02:\x85\x01\xea\x41\x81\x01\n+datacatalog.googleapis.com/TagTemplateField\x12Rprojects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}"\xa7\x03\n\tFieldType\x12S\n\x0eprimitive_type\x18\x01 \x01(\x0e\x32\x39.google.cloud.datacatalog.v1beta1.FieldType.PrimitiveTypeH\x00\x12I\n\tenum_type\x18\x02 \x01(\x0b\x32\x34.google.cloud.datacatalog.v1beta1.FieldType.EnumTypeH\x00\x1a\x8a\x01\n\x08\x45numType\x12V\n\x0e\x61llowed_values\x18\x01 
\x03(\x0b\x32>.google.cloud.datacatalog.v1beta1.FieldType.EnumType.EnumValue\x1a&\n\tEnumValue\x12\x19\n\x0c\x64isplay_name\x18\x01 \x01(\tB\x03\xe0\x41\x02"`\n\rPrimitiveType\x12\x1e\n\x1aPRIMITIVE_TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06\x44OUBLE\x10\x01\x12\n\n\x06STRING\x10\x02\x12\x08\n\x04\x42OOL\x10\x03\x12\r\n\tTIMESTAMP\x10\x04\x42\x0b\n\ttype_declBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], ) @@ -60,8 +66,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1292, - serialized_end=1388, + serialized_start=1789, + serialized_end=1885, ) _sym_db.RegisterEnumDescriptor(_FIELDTYPE_PRIMITIVETYPE) @@ -118,8 +124,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=276, - serialized_end=365, + serialized_start=351, + serialized_end=440, ) _TAG = _descriptor.Descriptor( @@ -162,7 +168,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -180,7 +186,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -216,14 +222,16 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_TAG_FIELDSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A~\n\036datacatalog.googleapis.com/Tag\022\\projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}" 
+ ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -236,8 +244,8 @@ fields=[], ) ], - serialized_start=121, - serialized_end=374, + serialized_start=181, + serialized_end=581, ) @@ -275,8 +283,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=612, - serialized_end=645, + serialized_start=824, + serialized_end=857, ) _TAGFIELD = _descriptor.Descriptor( @@ -301,7 +309,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -411,8 +419,8 @@ fields=[], ) ], - serialized_start=377, - serialized_end=653, + serialized_start=584, + serialized_end=865, ) @@ -468,8 +476,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=782, - serialized_end=879, + serialized_start=999, + serialized_end=1096, ) _TAGTEMPLATE = _descriptor.Descriptor( @@ -530,20 +538,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_TAGTEMPLATE_FIELDSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Am\n&datacatalog.googleapis.com/TagTemplate\022Cprojects/{project}/locations/{location}/tagTemplates/{tag_template}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=656, - serialized_end=879, + serialized_start=868, + serialized_end=1210, ) @@ -554,10 +564,28 @@ file=DESCRIPTOR, containing_type=None, fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.datacatalog.v1beta1.TagTemplateField.name", + index=0, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), 
_descriptor.FieldDescriptor( name="display_name", full_name="google.cloud.datacatalog.v1beta1.TagTemplateField.display_name", - index=0, + index=1, number=1, type=9, cpp_type=9, @@ -575,7 +603,7 @@ _descriptor.FieldDescriptor( name="type", full_name="google.cloud.datacatalog.v1beta1.TagTemplateField.type", - index=1, + index=2, number=2, type=11, cpp_type=10, @@ -587,20 +615,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\201\001\n+datacatalog.googleapis.com/TagTemplateField\022Rprojects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=881, - serialized_end=980, + serialized_start=1213, + serialized_end=1472, ) @@ -626,7 +656,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -638,8 +668,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=612, - serialized_end=645, + serialized_start=1749, + serialized_end=1787, ) _FIELDTYPE_ENUMTYPE = _descriptor.Descriptor( @@ -676,8 +706,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1157, - serialized_end=1290, + serialized_start=1649, + serialized_end=1787, ) _FIELDTYPE = _descriptor.Descriptor( @@ -740,8 +770,8 @@ fields=[], ) ], - serialized_start=983, - serialized_end=1401, + serialized_start=1475, + serialized_end=1898, ) _TAG_FIELDSENTRY.fields_by_name["value"].message_type = _TAGFIELD @@ -828,34 +858,32 @@ Attributes: name: - Required when used in [UpdateTagRequest][google.cloud.datacata - log.v1beta1.UpdateTagRequest]. The resource name of the tag in - URL format. 
For example, projects/{project\_id}/locations/{loc - ation}/entrygroups/{entry\_group\_id}/entries/{entry\_id}/tags - /{tag\_id}", where tag\_id is a system-generated identifier. - Note that this Tag may not actually be stored in the location - in this name. + The resource name of the tag in URL format. Example: - proje + cts/{project\_id}/locations/{location}/entrygroups/{entry\_gro + up\_id}/entries/{entry\_id}/tags/{tag\_id} where ``tag_id`` + is a system-generated identifier. Note that this Tag may not + actually be stored in the location in this name. template: Required. The resource name of the tag template that this tag - uses. For example, projects/{project\_id}/locations/{location} - /tagTemplates/{tag\_template\_id}. This field cannot be + uses. Example: - projects/{project\_id}/locations/{location} + /tagTemplates/{tag\_template\_id} This field cannot be modified after creation. template_display_name: Output only. The display name of the tag template. scope: - Optional. The scope within the parent resource that this tag - is attached to. If not provided, the tag is attached to the - parent resource itself. Deleting the scope from the parent - resource will delete all tags attached to that scope. These - fields cannot be updated after creation. + The scope within the parent resource that this tag is attached + to. If not provided, the tag is attached to the parent + resource itself. Deleting the scope from the parent resource + will delete all tags attached to that scope. These fields + cannot be updated after creation. column: Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an individual column based on that schema. For attaching a tag to a nested column, - use '.' to separate the column names: - "outer\_column.inner\_column". + use ``.`` to separate the column names. Example: - + ``outer_column.inner_column`` fields: - Required. This maps the id of a tag field to the value of & + Required. 
This maps the ID of a tag field to the value of and additional information about that field. Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields. @@ -931,7 +959,7 @@ ), DESCRIPTOR=_TAGTEMPLATE, __module__="google.cloud.datacatalog_v1beta1.proto.tags_pb2", - __doc__="""Tag templates defines the schema of the tags used to attach to Data + __doc__="""A tag template defines the schema of the tags used to attach to Data Catalog resources. It defines the mapping of accepted field names and types that can be used within the tag. The tag template also controls the access to the tag. @@ -939,17 +967,16 @@ Attributes: name: - Required when used in [UpdateTagTemplateRequest][google.cloud. - datacatalog.v1beta1.UpdateTagTemplateRequest]. The resource - name of the tag template in URL format. For example, projects/ - {project\_id}/locations/{location}/tagTemplates/{tag\_template - \_id}. Note that this TagTemplate and its child resources may - not actually be stored in the location in this name. + The resource name of the tag template in URL format. Example: + - projects/{project\_id}/locations/{location}/tagTemplates/{t + ag\_template\_id} Note that this TagTemplate and its child + resources may not actually be stored in the location in this + name. display_name: - Optional. The display name for this template. Default value is - an empty string. + The display name for this template. Defaults to an empty + string. fields: - Required. Map of tag template field ids to the settings for + Required. Map of tag template field IDs to the settings for the field. This map is an exhaustive list of the allowed fields. This map must contain at least one field and at most 500 fields. The keys to this map are tag template field IDs. @@ -974,9 +1001,14 @@ Attributes: + name: + Output only. The resource name of the tag template field in + URL format. 
Example: - projects/{project\_id}/locations/{loc + ation}/tagTemplates/{tag\_template}/fields/{field} Note that + this TagTemplateField may not actually be stored in the + location in this name. display_name: - Optional. The display name for this field. Default value is an - empty string. + The display name for this field. Defaults to an empty string. type: Required. The type of value this tag field can contain. """, @@ -1013,13 +1045,13 @@ __doc__=""" Attributes: allowed_values: - Required. The set of allowed values for this enum. This set - must not be empty, the display names of the values in this set - must not be empty and the display names of the values must be - case-insensitively unique within this set. Currently, enum - values can only be added to the list of allowed values. - Deletion and renaming of enum values are not supported. Can - have up to 500 allowed values. + Required on create; optional on update. The set of allowed + values for this enum. This set must not be empty, the display + names of the values in this set must not be empty and the + display names of the values must be case-insensitively unique + within this set. Currently, enum values can only be added to + the list of allowed values. Deletion and renaming of enum + values are not supported. Can have up to 500 allowed values. 
""", # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.FieldType.EnumType) ), @@ -1045,5 +1077,16 @@ DESCRIPTOR._options = None _TAG_FIELDSENTRY._options = None +_TAG.fields_by_name["template"]._options = None +_TAG.fields_by_name["template_display_name"]._options = None +_TAG.fields_by_name["fields"]._options = None +_TAG._options = None +_TAGFIELD.fields_by_name["display_name"]._options = None _TAGTEMPLATE_FIELDSENTRY._options = None +_TAGTEMPLATE.fields_by_name["fields"]._options = None +_TAGTEMPLATE._options = None +_TAGTEMPLATEFIELD.fields_by_name["name"]._options = None +_TAGTEMPLATEFIELD.fields_by_name["type"]._options = None +_TAGTEMPLATEFIELD._options = None +_FIELDTYPE_ENUMTYPE_ENUMVALUE.fields_by_name["display_name"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps.proto b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps.proto index bb048b915223..9a3d640e411c 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps.proto +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.datacatalog.v1beta1; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option cc_enable_arenas = true; @@ -26,13 +27,14 @@ option java_package = "com.google.cloud.datacatalog"; // Timestamps about this resource according to a particular system. message SystemTimestamps { - // Output only. The creation time of the resource within the given system. + // The creation time of the resource within the given system. google.protobuf.Timestamp create_time = 1; - // Output only. The last-modified time of the resource within the given - // system. + // The last-modified time of the resource within the given system. google.protobuf.Timestamp update_time = 2; // Output only. The expiration time of the resource within the given system. 
- google.protobuf.Timestamp expire_time = 3; + // Currently only applicable to BigQuery resources. + google.protobuf.Timestamp expire_time = 3 + [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py index 3c1c341f65ce..4dff9fc23fa7 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/proto/timestamps_pb2.py @@ -15,6 +15,7 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -26,9 +27,12 @@ "\n\034com.google.cloud.datacatalogP\001ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\370\001\001" ), serialized_pb=_b( - '\n7google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/protobuf/timestamp.proto"\xa5\x01\n\x10SystemTimestamps\x12/\n\x0b\x63reate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampBp\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' + '\n7google/cloud/datacatalog_v1beta1/proto/timestamps.proto\x12 google.cloud.datacatalog.v1beta1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xaa\x01\n\x10SystemTimestamps\x12/\n\x0b\x63reate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x0b\x65xpire_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42p\n\x1c\x63om.google.cloud.datacatalogP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/datacatalog/v1beta1;datacatalog\xf8\x01\x01\x62\x06proto3' ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], ) @@ -90,7 +94,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -102,8 +106,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=127, - serialized_end=292, + serialized_start=160, + serialized_end=330, ) _SYSTEMTIMESTAMPS.fields_by_name[ @@ -129,14 +133,13 @@ Attributes: create_time: - Output only. The creation time of the resource within the - given system. + The creation time of the resource within the given system. update_time: - Output only. The last-modified time of the resource within the - given system. + The last-modified time of the resource within the + given system. expire_time: Output only. The expiration time of the resource within the - given system. + given system. Currently only applicable to BigQuery resources. 
""", # @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.SystemTimestamps) ), @@ -145,4 +148,5 @@ DESCRIPTOR._options = None +_SYSTEMTIMESTAMPS.fields_by_name["expire_time"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/datacatalog/google/cloud/datacatalog_v1beta1/types.py b/datacatalog/google/cloud/datacatalog_v1beta1/types.py index 05d90716fc84..e72d22d4ec84 100644 --- a/datacatalog/google/cloud/datacatalog_v1beta1/types.py +++ b/datacatalog/google/cloud/datacatalog_v1beta1/types.py @@ -21,6 +21,7 @@ from google.api_core.protobuf_helpers import get_messages from google.cloud.datacatalog_v1beta1.proto import datacatalog_pb2 +from google.cloud.datacatalog_v1beta1.proto import gcs_fileset_spec_pb2 from google.cloud.datacatalog_v1beta1.proto import schema_pb2 from google.cloud.datacatalog_v1beta1.proto import search_pb2 from google.cloud.datacatalog_v1beta1.proto import table_spec_pb2 @@ -47,6 +48,7 @@ _local_modules = [ datacatalog_pb2, + gcs_fileset_spec_pb2, schema_pb2, search_pb2, table_spec_pb2, diff --git a/datacatalog/noxfile.py b/datacatalog/noxfile.py index 3d92df19084f..509a565876ed 100644 --- a/datacatalog/noxfile.py +++ b/datacatalog/noxfile.py @@ -125,6 +125,25 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) +@nox.session(python=["2.7", "3.7"]) +def samples(session): + requirements_path = os.path.join("samples", "requirements.txt") + requirements_exists = os.path.exists(requirements_path) + + # Sanity check: Only run tests if the environment variable is set. 
+ if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + session.install("mock", "pytest") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + if requirements_exists: + session.install("-r", requirements_path) + session.install("-e", ".") + + session.run("py.test", "--quiet", "samples", *session.posargs) + + @nox.session(python="3.7") def cover(session): """Run the final coverage report. diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/__init__.py b/datacatalog/samples/__init__.py similarity index 100% rename from oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/__init__.py rename to datacatalog/samples/__init__.py diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/__init__.py b/datacatalog/samples/tests/__init__.py similarity index 100% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/__init__.py rename to datacatalog/samples/tests/__init__.py diff --git a/datacatalog/samples/tests/conftest.py b/datacatalog/samples/tests/conftest.py new file mode 100644 index 000000000000..b0669fa0df28 --- /dev/null +++ b/datacatalog/samples/tests/conftest.py @@ -0,0 +1,81 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import datetime +import uuid + +import pytest + +import google.auth +from google.cloud import datacatalog_v1beta1 + + +@pytest.fixture(scope="session") +def client(credentials): + return datacatalog_v1beta1.DataCatalogClient(credentials=credentials) + + +@pytest.fixture(scope="session") +def default_credentials(): + return google.auth.default() + + +@pytest.fixture(scope="session") +def credentials(default_credentials): + return default_credentials[0] + + +@pytest.fixture(scope="session") +def project_id(default_credentials): + return default_credentials[1] + + +@pytest.fixture +def random_entry_group_id(client, project_id): + now = datetime.datetime.now() + random_entry_group_id = "example_entry_group_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + yield random_entry_group_id + entry_group_name = datacatalog_v1beta1.DataCatalogClient.entry_group_path( + project_id, "us-central1", random_entry_group_id + ) + client.delete_entry_group(entry_group_name) + + +@pytest.fixture +def random_entry_name(client, entry_group_name): + now = datetime.datetime.now() + random_entry_id = "example_entry_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + random_entry_name = "{}/entries/{}".format(entry_group_name, random_entry_id) + yield random_entry_name + client.delete_entry(random_entry_name) + + +@pytest.fixture +def entry_group_name(client, project_id): + now = datetime.datetime.now() + entry_group_id = "python_entry_group_sample_{}_{}".format( + now.strftime("%Y%m%d%H%M%S"), uuid.uuid4().hex[:8] + ) + entry_group = client.create_entry_group( + datacatalog_v1beta1.DataCatalogClient.location_path(project_id, "us-central1"), + entry_group_id, + {}, + ) + yield entry_group.name + client.delete_entry_group(entry_group.name) diff --git a/datacatalog/samples/tests/test_create_entry_group.py b/datacatalog/samples/tests/test_create_entry_group.py new file mode 100644 index 000000000000..9c8c33b8cd64 --- /dev/null +++ 
b/datacatalog/samples/tests/test_create_entry_group.py @@ -0,0 +1,29 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from ..v1beta1 import create_entry_group + + +def test_create_entry_group(capsys, client, project_id, random_entry_group_id): + + create_entry_group.create_entry_group(client, project_id, random_entry_group_id) + out, err = capsys.readouterr() + assert ( + "Created entry group" + " projects/{}/locations/{}/entryGroups/{}".format( + project_id, "us-central1", random_entry_group_id + ) + in out + ) diff --git a/datacatalog/samples/tests/test_create_fileset_entry.py b/datacatalog/samples/tests/test_create_fileset_entry.py new file mode 100644 index 000000000000..8d0bc28fd07f --- /dev/null +++ b/datacatalog/samples/tests/test_create_fileset_entry.py @@ -0,0 +1,30 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +import re + +from ..v1beta1 import create_fileset_entry + + +def test_create_fileset_entry(capsys, client, random_entry_name): + + entry_name_pattern = "(?P<entry_group_name>.+?)/entries/(?P<entry_id>.+?$)" + entry_name_matches = re.match(entry_name_pattern, random_entry_name) + entry_group_name = entry_name_matches.group("entry_group_name") + entry_id = entry_name_matches.group("entry_id") + + create_fileset_entry.create_fileset_entry(client, entry_group_name, entry_id) + out, err = capsys.readouterr() + assert "Created entry {}".format(random_entry_name) in out diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/__init__.py b/datacatalog/samples/v1beta1/__init__.py similarity index 100% rename from videointelligence/google/cloud/videointelligence_v1beta1/gapic/transports/__init__.py rename to datacatalog/samples/v1beta1/__init__.py diff --git a/datacatalog/samples/v1beta1/create_entry_group.py b/datacatalog/samples/v1beta1/create_entry_group.py new file mode 100644 index 000000000000..24a856d8739c --- /dev/null +++ b/datacatalog/samples/v1beta1/create_entry_group.py @@ -0,0 +1,54 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_entry_group(client, project_id, entry_group_id): + + # [START datacatalog_create_entry_group_tag] + from google.cloud import datacatalog_v1beta1 + + # TODO(developer): Construct a Data Catalog client object. 
+ # client = datacatalog_v1beta1.DataCatalogClient() + + # TODO(developer): Set project_id to the ID of the project + # the entry group will belong to. + # project_id = "your-project-id" + + # TODO(developer): Specify the geographic location where the + # entry group should reside. + # Currently, Data Catalog stores metadata in the us-central1 region. + location_id = "us-central1" + + # TODO(developer): Set entry_group_id to the ID of the + # entry group to create. + # entry_group_id = "your_entry_group_id" + + # Construct a full location path to be the parent of the entry group. + parent = datacatalog_v1beta1.DataCatalogClient.location_path( + project_id, location_id + ) + + # Construct a full EntryGroup object to send to the API. + entry_group = datacatalog_v1beta1.types.EntryGroup() + entry_group.display_name = "My Entry Group" + entry_group.description = "This Entry Group consists of ..." + + # Send the entry group to the API for creation. + # Raises google.api_core.exceptions.AlreadyExists if the Entry Group + # already exists within the project. + entry_group = client.create_entry_group( + parent, entry_group_id, entry_group + ) # Make an API request. + print("Created entry group {}".format(entry_group.name)) + # [END datacatalog_create_entry_group_tag] diff --git a/datacatalog/samples/v1beta1/create_fileset_entry.py b/datacatalog/samples/v1beta1/create_fileset_entry.py new file mode 100644 index 000000000000..6cc275655988 --- /dev/null +++ b/datacatalog/samples/v1beta1/create_fileset_entry.py @@ -0,0 +1,86 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +def create_fileset_entry(client, entry_group_name, entry_id): + + # [START datacatalog_create_fileset_tag] + from google.cloud import datacatalog_v1beta1 + + # TODO(developer): Construct a Data Catalog client object. + # client = datacatalog_v1beta1.DataCatalogClient() + + # TODO(developer): Set entry_group_name to the name of the entry group + # the entry will belong to. + # entry_group_name = "your_entry_group_name" + + # TODO(developer): Set entry_id to the ID of the entry to create. + # entry_id = "your_entry_id" + + # Construct a full Entry object to send to the API. + entry = datacatalog_v1beta1.types.Entry() + entry.display_name = "My Fileset" + entry.description = "This Fileset consists of ..." + entry.gcs_fileset_spec.file_patterns.append("gs://my_bucket/*") + entry.type = datacatalog_v1beta1.enums.EntryType.FILESET + + # Create the Schema, for example when you have a csv file. 
+ columns = [] + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="first_name", + description="First name", + mode="REQUIRED", + type="STRING", + ) + ) + + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="last_name", description="Last name", mode="REQUIRED", type="STRING" + ) + ) + + # Create sub columns for the addresses parent column + subcolumns = [] + subcolumns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="city", description="City", mode="NULLABLE", type="STRING" + ) + ) + + subcolumns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="state", description="State", mode="NULLABLE", type="STRING" + ) + ) + + columns.append( + datacatalog_v1beta1.types.ColumnSchema( + column="addresses", + description="Addresses", + mode="REPEATED", + subcolumns=subcolumns, + type="RECORD", + ) + ) + + entry.schema.columns.extend(columns) + + # Send the entry to the API for creation. + # Raises google.api_core.exceptions.AlreadyExists if the Entry already + # exists within the project. 
+ entry = client.create_entry(entry_group_name, entry_id, entry) + print("Created entry {}".format(entry.name)) + # [END datacatalog_create_fileset_tag] diff --git a/datacatalog/setup.py b/datacatalog/setup.py index 29bbee31b9d8..624600269ca2 100644 --- a/datacatalog/setup.py +++ b/datacatalog/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-datacatalog" description = "Google Cloud Data Catalog API API client library" -version = "0.3.0" +version = "0.4.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/datacatalog/synth.metadata b/datacatalog/synth.metadata index 39cb85fb4bd1..422d34666ecd 100644 --- a/datacatalog/synth.metadata +++ b/datacatalog/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-27T12:16:56.566500Z", + "updateTime": "2019-10-23T12:17:52.391296Z", "sources": [ { "generator": { "name": "artman", - "version": "0.35.1", - "dockerImage": "googleapis/artman@sha256:b11c7ea0d0831c54016fb50f4b796d24d1971439b30fbc32a369ba1ac887c384" + "version": "0.40.2", + "dockerImage": "googleapis/artman@sha256:3b8f7d9b4c206843ce08053474f5c64ae4d388ff7d995e68b59fb65edf73eeb9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "650caad718bb063f189405c23972dc9818886358", - "internalRef": "265565344" + "sha": "0d0dc5172f16c9815a5eda6e99408fb96282f608", + "internalRef": "276178557" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/datacatalog/synth.py b/datacatalog/synth.py index 468dc63c713d..5f1436288e14 100644 --- a/datacatalog/synth.py +++ b/datacatalog/synth.py @@ -57,7 +57,11 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(unit_cov_level=80, cov_level=80) +templated_files = common.py_library( 
+ unit_cov_level=80, + cov_level=80, + samples_test=True, +) s.move(templated_files) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py b/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py index 35dc0b31db1a..61c35d263299 100644 --- a/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py +++ b/datacatalog/tests/unit/gapic/v1beta1/test_data_catalog_client_v1beta1.py @@ -112,6 +112,183 @@ def test_search_catalog_exception(self): with pytest.raises(CustomException): list(paged_list_response) + def test_create_entry_group(self): + # Setup Expected Response + name = "name3373707" + display_name = "displayName1615086568" + description = "description-1724546052" + expected_response = { + "name": name, + "display_name": display_name, + "description": description, + } + expected_response = datacatalog_pb2.EntryGroup(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + entry_group_id = "entryGroupId-43122680" + entry_group = {} + + response = client.create_entry_group(parent, entry_group_id, entry_group) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.CreateEntryGroupRequest( + parent=parent, entry_group_id=entry_group_id, entry_group=entry_group + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_entry_group_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + 
create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + entry_group_id = "entryGroupId-43122680" + entry_group = {} + + with pytest.raises(CustomException): + client.create_entry_group(parent, entry_group_id, entry_group) + + def test_get_entry_group(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + description = "description-1724546052" + expected_response = { + "name": name_2, + "display_name": display_name, + "description": description, + } + expected_response = datacatalog_pb2.EntryGroup(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + name = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + + response = client.get_entry_group(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.GetEntryGroupRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_entry_group_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + name = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + + with pytest.raises(CustomException): + client.get_entry_group(name) + + def test_delete_entry_group(self): + channel = ChannelStub() + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + 
create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + name = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + + client.delete_entry_group(name) + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.DeleteEntryGroupRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_entry_group_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + name = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + + with pytest.raises(CustomException): + client.delete_entry_group(name) + + def test_create_entry(self): + # Setup Expected Response + name = "name3373707" + linked_resource = "linkedResource1544625012" + display_name = "displayName1615086568" + description = "description-1724546052" + expected_response = { + "name": name, + "linked_resource": linked_resource, + "display_name": display_name, + "description": description, + } + expected_response = datacatalog_pb2.Entry(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + parent = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + entry_id = "entryId-2093663224" + entry = {} + + response = client.create_entry(parent, entry_id, entry) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.CreateEntryRequest( + parent=parent, entry_id=entry_id, entry=entry + ) 
+ actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_entry_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + parent = client.entry_group_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]") + entry_id = "entryId-2093663224" + entry = {} + + with pytest.raises(CustomException): + client.create_entry(parent, entry_id, entry) + def test_update_entry(self): # Setup Expected Response name = "name3373707" @@ -158,6 +335,37 @@ def test_update_entry_exception(self): with pytest.raises(CustomException): client.update_entry(entry) + def test_delete_entry(self): + channel = ChannelStub() + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup Request + name = client.entry_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]", "[ENTRY]") + + client.delete_entry(name) + + assert len(channel.requests) == 1 + expected_request = datacatalog_pb2.DeleteEntryRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_entry_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = datacatalog_v1beta1.DataCatalogClient() + + # Setup request + name = client.entry_path("[PROJECT]", "[LOCATION]", "[ENTRY_GROUP]", "[ENTRY]") + + with pytest.raises(CustomException): + client.delete_entry(name) + def test_get_entry(self): # Setup Expected Response name_2 = "name2-1052831874" @@ -406,8 +614,9 @@ def 
test_delete_tag_template_exception(self): def test_create_tag_template_field(self): # Setup Expected Response + name = "name3373707" display_name = "displayName1615086568" - expected_response = {"display_name": display_name} + expected_response = {"name": name, "display_name": display_name} expected_response = tags_pb2.TagTemplateField(**expected_response) # Mock the API response @@ -456,8 +665,9 @@ def test_create_tag_template_field_exception(self): def test_update_tag_template_field(self): # Setup Expected Response + name_2 = "name2-1052831874" display_name = "displayName1615086568" - expected_response = {"display_name": display_name} + expected_response = {"name": name_2, "display_name": display_name} expected_response = tags_pb2.TagTemplateField(**expected_response) # Mock the API response @@ -498,8 +708,9 @@ def test_update_tag_template_field_exception(self): def test_rename_tag_template_field(self): # Setup Expected Response + name_2 = "name2-1052831874" display_name = "displayName1615086568" - expected_response = {"display_name": display_name} + expected_response = {"name": name_2, "display_name": display_name} expected_response = tags_pb2.TagTemplateField(**expected_response) # Mock the API response diff --git a/datalabeling/CHANGELOG.md b/datalabeling/CHANGELOG.md index a97c3cab80fb..9616fc56ff76 100644 --- a/datalabeling/CHANGELOG.md +++ b/datalabeling/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-datalabeling/#history +## 0.3.0 + +10-10-2019 11:08 PDT + + +### Implementation Changes +- Remove send / receive message size limit (via synth). ([#8950](https://github.com/googleapis/google-cloud-python/pull/8950)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Fix intersphinx reference to requests. 
([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + ## 0.2.1 07-16-2019 10:17 PDT diff --git a/datalabeling/setup.py b/datalabeling/setup.py index d72653c651fe..0cc796762ebf 100644 --- a/datalabeling/setup.py +++ b/datalabeling/setup.py @@ -21,7 +21,7 @@ name = "google-cloud-datalabeling" description = "Data Labeling API client library" -version = "0.2.1" +version = "0.3.0" release_status = "Development Status :: 4 - Beta" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", diff --git a/dataproc/docs/conf.py b/dataproc/docs/conf.py index 7b9aa311615a..29a4ab9935bc 100644 --- a/dataproc/docs/conf.py +++ b/dataproc/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py b/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py index 4cda9a051e29..82571d6ed3b5 100644 --- a/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py +++ b/dataproc/google/cloud/dataproc_v1/gapic/cluster_controller_client.py 
@@ -203,7 +203,9 @@ def create_cluster( metadata=None, ): """ - Creates a cluster in a project. + Creates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1 @@ -306,7 +308,9 @@ def update_cluster( metadata=None, ): """ - Updates a cluster in a project. + Updates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1 @@ -401,6 +405,10 @@ def update_cluster( config.secondary_worker_config.num_instances Resize secondary worker group + + config.autoscaling_config.policy_uriUse, stop using, or + change autoscaling policies + @@ -489,7 +497,9 @@ def delete_cluster( metadata=None, ): """ - Deletes a cluster in a project. + Deletes a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1 @@ -779,8 +789,11 @@ def diagnose_cluster( metadata=None, ): """ - Gets cluster diagnostic information. After the operation completes, the - Operation.response field contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. The returned ``Operation.metadata`` + will be + `ClusterOperationMetadata `__. + After the operation completes, ``Operation.response`` contains + `DiagnoseClusterResults `__. 
Example: >>> from google.cloud import dataproc_v1 diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py index b3c8b7d50f64..3c4a813d437b 100644 --- a/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py +++ b/dataproc/google/cloud/dataproc_v1/gapic/transports/cluster_controller_grpc_transport.py @@ -119,7 +119,9 @@ def channel(self): def create_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.create_cluster`. - Creates a cluster in a project. + Creates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -132,7 +134,9 @@ def create_cluster(self): def update_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.update_cluster`. - Updates a cluster in a project. + Updates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -145,7 +149,9 @@ def update_cluster(self): def delete_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.delete_cluster`. - Deletes a cluster in a project. + Deletes a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -184,8 +190,11 @@ def list_clusters(self): def diagnose_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`. - Gets cluster diagnostic information. After the operation completes, the - Operation.response field contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. The returned ``Operation.metadata`` + will be + `ClusterOperationMetadata `__. 
+ After the operation completes, ``Operation.response`` contains + `DiagnoseClusterResults `__. Returns: Callable: A callable which accepts the appropriate diff --git a/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py b/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py index 73955041c6fb..86a35d067ba4 100644 --- a/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py +++ b/dataproc/google/cloud/dataproc_v1/gapic/transports/workflow_template_service_grpc_transport.py @@ -160,7 +160,10 @@ def instantiate_workflow_template(self): cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. @@ -189,7 +192,10 @@ def instantiate_inline_workflow_template(self): cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. diff --git a/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py b/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py index b3b38728fc47..0b39b3d67f62 100644 --- a/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py +++ b/dataproc/google/cloud/dataproc_v1/gapic/workflow_template_service_client.py @@ -241,9 +241,16 @@ def create_workflow_template( >>> response = client.create_workflow_template(parent, template) Args: - parent (str): Required. 
The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,create``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.create``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. If a dict is provided, it must be of the same form as the protobuf @@ -322,11 +329,18 @@ def get_workflow_template( >>> response = client.get_workflow_template(name) Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.get``, the resource name of + the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.get``, the resource name + of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to retrieve. Only previously - instatiated versions can be retrieved. + instantiated versions can be retrieved. If unspecified, retrieves the current version. 
retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -400,7 +414,10 @@ def instantiate_workflow_template( cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. @@ -423,9 +440,16 @@ def instantiate_workflow_template( >>> metadata = response.metadata() Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.instantiate``, the resource + name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the current version of the workflow template has the supplied version. @@ -525,7 +549,10 @@ def instantiate_inline_workflow_template( cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. 
@@ -551,9 +578,16 @@ def instantiate_inline_workflow_template( >>> metadata = response.metadata() Args: - parent (str): Required. The "resource name" of the workflow template region, as - described in https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,instantiateinline``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.instantiateinline``, the + resource name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template (Union[dict, ~google.cloud.dataproc_v1.types.WorkflowTemplate]): Required. The workflow template to instantiate. If a dict is provided, it must be of the same form as the protobuf @@ -738,9 +772,16 @@ def list_workflow_templates( ... pass Args: - parent (str): Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,list``, the resource name of + the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.list``, the resource name + of the location has the following format: + ``projects/{project_id}/locations/{location}`` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. 
If page @@ -831,9 +872,16 @@ def delete_workflow_template( >>> client.delete_workflow_template(name) Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.delete``, the resource name + of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to delete. If specified, will only delete the template if the current server version matches specified version. diff --git a/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto new file mode 100644 index 000000000000..cb466ee851f1 --- /dev/null +++ b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies.proto @@ -0,0 +1,340 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/api/client.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "AutoscalingPoliciesProto"; +option java_package = "com.google.cloud.dataproc.v1"; + +// The API interface for managing autoscaling policies in the +// Google Cloud Dataproc API. +service AutoscalingPolicyService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates new autoscaling policy. + rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" + body: "policy" + additional_bindings { + post: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" + body: "policy" + } + }; + } + + // Updates (replaces) autoscaling policy. + // + // Disabled check for update_mask, because all updates will be full + // replacements. + rpc UpdateAutoscalingPolicy(UpdateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + put: "/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}" + body: "policy" + additional_bindings { + put: "/v1/{policy.name=projects/*/regions/*/autoscalingPolicies/*}" + body: "policy" + } + }; + } + + // Retrieves autoscaling policy. 
+ rpc GetAutoscalingPolicy(GetAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" + additional_bindings { + get: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" + } + }; + } + + // Lists autoscaling policies in the project. + rpc ListAutoscalingPolicies(ListAutoscalingPoliciesRequest) returns (ListAutoscalingPoliciesResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" + additional_bindings { + get: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" + } + }; + } + + // Deletes an autoscaling policy. It is an error to delete an autoscaling + // policy that is in use by one or more clusters. + rpc DeleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" + additional_bindings { + delete: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" + } + }; + } +} + +// Describes an autoscaling policy for Dataproc cluster autoscaler. +message AutoscalingPolicy { + option (google.api.resource) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" + }; + + // Required. The policy id. + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + // + string id = 1; + + // Output only. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Autoscaling algorithm for policy. + oneof algorithm { + BasicAutoscalingAlgorithm basic_algorithm = 3 [(google.api.field_behavior) = REQUIRED]; + } + + // Required. Describes how the autoscaler will operate for primary workers. + InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Describes how the autoscaler will operate for secondary workers. + InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Basic algorithm for autoscaling. +message BasicAutoscalingAlgorithm { + // Required. YARN autoscaling configuration. + BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Duration between scaling events. A scaling period starts after + // the update operation from the previous event has completed. + // + // Bounds: [2m, 1d]. Default: 2m. + google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// Basic autoscaling configurations for YARN. +message BasicYarnAutoscalingConfig { + // Required. Timeout for YARN graceful decommissioning of Node Managers. + // Specifies the duration to wait for jobs to complete before forcefully + // removing workers (and potentially interrupting jobs). Only applicable to + // downscaling operations. + // + // Bounds: [0s, 1d]. + google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; + + // Required. 
Fraction of average pending memory in the last cooldown period + // for which to add workers. A scale-up factor of 1.0 will result in scaling + // up so that there is no pending memory remaining after the update (more + // aggressive scaling). A scale-up factor closer to 0 will result in a smaller + // magnitude of scaling up (less aggressive scaling). + // + // Bounds: [0.0, 1.0]. + double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Fraction of average pending memory in the last cooldown period + // for which to remove workers. A scale-down factor of 1 will result in + // scaling down so that there is no available memory remaining after the + // update (more aggressive scaling). A scale-down factor of 0 disables + // removing workers, which can be beneficial for autoscaling a single job. + // + // Bounds: [0.0, 1.0]. + double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Minimum scale-up threshold as a fraction of total cluster size + // before scaling occurs. For example, in a 20-worker cluster, a threshold of + // 0.1 means the autoscaler must recommend at least a 2-worker scale-up for + // the cluster to scale. A threshold of 0 means the autoscaler will scale up + // on any recommended change. + // + // Bounds: [0.0, 1.0]. Default: 0.0. + double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Minimum scale-down threshold as a fraction of total cluster size + // before scaling occurs. For example, in a 20-worker cluster, a threshold of + // 0.1 means the autoscaler must recommend at least a 2 worker scale-down for + // the cluster to scale. A threshold of 0 means the autoscaler will scale down + // on any recommended change. + // + // Bounds: [0.0, 1.0]. Default: 0.0. 
+ double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Configuration for the size bounds of an instance group, including its +// proportional size to other groups. +message InstanceGroupAutoscalingPolicyConfig { + // Optional. Minimum number of instances for this group. + // + // Primary workers - Bounds: [2, max_instances]. Default: 2. + // Secondary workers - Bounds: [0, max_instances]. Default: 0. + int32 min_instances = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Maximum number of instances for this group. Required for primary + // workers. Note that by default, clusters will not use secondary workers. + // Required for secondary workers if the minimum secondary instances is set. + // + // Primary workers - Bounds: [min_instances, ). + // Secondary workers - Bounds: [min_instances, ). Default: 0. + int32 max_instances = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Weight for the instance group, which is used to determine the + // fraction of total workers in the cluster from this instance group. + // For example, if primary workers have weight 2, and secondary workers have + // weight 1, the cluster will have approximately 2 primary workers for each + // secondary worker. + // + // The cluster may not reach the specified balance if constrained + // by min/max bounds or other autoscaling settings. For example, if + // `max_instances` for secondary workers is 0, then only primary workers will + // be added. The cluster can also be out of balance when created. + // + // If weight is not set on any instance group, the cluster will default to + // equal weight for all groups: the cluster will attempt to maintain an equal + // number of workers in each group within the configured size bounds for each + // group. If weight is set for one group only, the cluster will default to + // zero weight on the unset group. 
For example if weight is set only on + // primary workers, the cluster will use primary workers only and no + // secondary workers. + int32 weight = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to create an autoscaling policy. +message CreateAutoscalingPolicyRequest { + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.create`, the resource name + // of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.create`, the resource name + // of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; + + // The autoscaling policy to create. + AutoscalingPolicy policy = 2; +} + +// A request to fetch an autoscaling policy. +message GetAutoscalingPolicyRequest { + // Required. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; +} + +// A request to update an autoscaling policy. +message UpdateAutoscalingPolicyRequest { + // Required. The updated autoscaling policy. 
+ AutoscalingPolicy policy = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to delete an autoscaling policy. +// +// Autoscaling policies in use by one or more clusters will not be deleted. +message DeleteAutoscalingPolicyRequest { + // Required. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; +} + +// A request to list autoscaling policies in a project. +message ListAutoscalingPoliciesRequest { + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.list`, the resource name + // of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.list`, the resource name + // of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; + + // Optional. The maximum number of results to return in each response. + // Must be less than or equal to 1000. Defaults to 100. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. 
The page token, returned by a previous call, to request the + // next page of results. + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// A response to a request to list autoscaling policies in a project. +message ListAutoscalingPoliciesResponse { + // Output only. Autoscaling policies list. + repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. This token is included in the response if there are more + // results to fetch. + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py new file mode 100644 index 000000000000..0dc596cd2eca --- /dev/null +++ b/dataproc/google/cloud/dataproc_v1/proto/autoscaling_policies_pb2.py @@ -0,0 +1,1207 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/dataproc_v1/proto/autoscaling_policies.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + 
name="google/cloud/dataproc_v1/proto/autoscaling_policies.proto", + package="google.cloud.dataproc.v1", + syntax="proto3", + serialized_options=_b( + "\n\034com.google.cloud.dataproc.v1B\030AutoscalingPoliciesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" + ), + serialized_pb=_b( + '\n9google/cloud/dataproc_v1/proto/autoscaling_policies.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x17google/api/client.proto"\xd4\x03\n\x11\x41utoscalingPolicy\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 \x01(\x0b\x32\x33.google.cloud.dataproc.v1.BasicAutoscalingAlgorithmB\x03\xe0\x41\x02H\x00\x12Z\n\rworker_config\x18\x04 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x02\x12\x64\n\x17secondary_worker_config\x18\x05 \x01(\x0b\x32>.google.cloud.dataproc.v1.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x01:|\xea\x41y\n)dataproc.googleapis.com/AutoscalingPolicy\x12Lprojects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}B\x0b\n\talgorithm"\xa4\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12N\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x34.google.cloud.dataproc.v1.BasicYarnAutoscalingConfigB\x03\xe0\x41\x02\x12\x37\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\xf9\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12\x45\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12\x1c\n\x0fscale_up_factor\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12\x1e\n\x11scale_down_factor\x18\x02 \x01(\x01\x42\x03\xe0\x41\x02\x12)\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x42\x03\xe0\x41\x01\x12+\n\x1escale_down_min_worker_fraction\x18\x04 
\x01(\x01\x42\x03\xe0\x41\x01"s\n$InstanceGroupAutoscalingPolicyConfig\x12\x1a\n\rmin_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1a\n\rmax_instances\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02\x12\x13\n\x06weight\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01"\xa0\x01\n\x1e\x43reateAutoscalingPolicyRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12;\n\x06policy\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicy"^\n\x1bGetAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"b\n\x1eUpdateAutoscalingPolicyRequest\x12@\n\x06policy\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x02"a\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x94\x01\n\x1eListAutoscalingPoliciesRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x83\x01\n\x1fListAutoscalingPoliciesResponse\x12\x42\n\x08policies\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingPolicyB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 
\x01(\tB\x03\xe0\x41\x03\x32\xfd\n\n\x18\x41utoscalingPolicyService\x12\x8c\x02\n\x17\x43reateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"7/v1/{parent=projects/*/locations/*}/autoscalingPolicies:\x06policyZ?"5/v1/{parent=projects/*/regions/*}/autoscalingPolicies:\x06policy\x12\x9a\x02\n\x17UpdateAutoscalingPolicy\x12\x38.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest\x1a+.google.cloud.dataproc.v1.AutoscalingPolicy"\x97\x01\x82\xd3\xe4\x93\x02\x90\x01\x1a>/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}:\x06policyZF\x1a/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}:\006policyZF\032 labels = 8; + map labels = 8 [(google.api.field_behavior) = OPTIONAL]; // Output only. Cluster status. - ClusterStatus status = 4; + ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous cluster status. - repeated ClusterStatus status_history = 7; + repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc // generates this value when it creates the cluster. - string cluster_uuid = 6; + string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Contains cluster daemon metrics such as HDFS and YARN stats. // @@ -132,26 +169,26 @@ message ClusterConfig { // and manage this project-level, per-location bucket (see // [Cloud Dataproc staging // bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - string config_bucket = 1; + string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The shared Compute Engine config settings for // all instances in a cluster. - GceClusterConfig gce_cluster_config = 8; + GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
The Compute Engine config settings for // the master instance in a cluster. - InstanceGroupConfig master_config = 9; + InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // worker instances in a cluster. - InstanceGroupConfig worker_config = 10; + InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // additional worker instances in a cluster. - InstanceGroupConfig secondary_worker_config = 12; + InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; // Optional. The config settings for software inside the cluster. - SoftwareConfig software_config = 13; + SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; // Optional. Commands to execute on each node after config is // completed. By default, executables are run on master and all worker nodes. @@ -166,17 +203,38 @@ message ClusterConfig { // else // ... worker specific actions ... // fi - repeated NodeInitializationAction initialization_actions = 11; + repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; // Optional. Encryption settings for the cluster. - EncryptionConfig encryption_config = 15; + EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Autoscaling config for the policy associated with the cluster. + // Cluster does not autoscale if this field is unset. + AutoscalingConfig autoscaling_config = 18 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Security settings for the cluster. + SecurityConfig security_config = 16 [(google.api.field_behavior) = OPTIONAL]; +} + +// Autoscaling Policy config associated with the cluster. +message AutoscalingConfig { + // Optional. The autoscaling policy used by the cluster. 
+ // + // Only resource names including projectid and location (region) are valid. + // Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` + // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` + // + // Note that the policy must be in the same project and Cloud Dataproc region. + string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; } // Encryption settings for the cluster. message EncryptionConfig { // Optional. The Cloud KMS key name to use for PD disk encryption for all // instances in the cluster. - string gce_pd_kms_key_name = 1; + string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; } // Common config settings for resources of Compute Engine cluster @@ -193,7 +251,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` // * `projects/[project_id]/zones/[zone]` // * `us-central1-f` - string zone_uri = 1; + string zone_uri = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine network to be used for machine // communications. Cannot be specified with subnetwork_uri. If neither @@ -206,7 +264,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` // * `projects/[project_id]/regions/global/default` // * `default` - string network_uri = 2; + string network_uri = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine subnetwork to be used for machine // communications. Cannot be specified with network_uri. @@ -216,7 +274,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` // * `projects/[project_id]/regions/us-east1/subnetworks/sub0` // * `sub0` - string subnetwork_uri = 6; + string subnetwork_uri = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
If true, all instances in the cluster will only have internal IP // addresses. By default, clusters are not restricted to internal IP @@ -224,7 +282,7 @@ message GceClusterConfig { // instance. This `internal_ip_only` restriction can only be enabled for // subnetwork enabled networks, and all off-cluster dependencies must be // configured to be accessible without external IP addresses. - bool internal_ip_only = 7; + bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The service account of the instances. Defaults to the default // Compute Engine service account. Custom service accounts need @@ -237,7 +295,7 @@ message GceClusterConfig { // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts // for more information). // Example: `[account_id]@[project_id].iam.gserviceaccount.com` - string service_account = 8; + string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The URIs of service account scopes to be included in // Compute Engine instances. The following base set of scopes is always @@ -253,7 +311,7 @@ message GceClusterConfig { // * https://www.googleapis.com/auth/bigtable.admin.table // * https://www.googleapis.com/auth/bigtable.data // * https://www.googleapis.com/auth/devstorage.full_control - repeated string service_account_scopes = 3; + repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL]; // The Compute Engine tags to add to all instances (see // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). @@ -270,16 +328,16 @@ message GceClusterConfig { message InstanceGroupConfig { // Optional. The number of VM instances in the instance group. // For master instance groups, must be set to 1. - int32 num_instances = 1; + int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; // Output only. The list of instance names. Cloud Dataproc derives the names // from `cluster_name`, `num_instances`, and the instance group. 
- repeated string instance_names = 2; + repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine image resource used for cluster // instances. It can be specified or may be inferred from // `SoftwareConfig.image_version`. - string image_uri = 3; + string image_uri = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine machine type used for cluster instances. // @@ -294,36 +352,38 @@ message InstanceGroupConfig { // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) // feature, you must use the short name of the machine type // resource, for example, `n1-standard-2`. - string machine_type_uri = 4; + string machine_type_uri = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Disk option config settings. - DiskConfig disk_config = 5; + DiskConfig disk_config = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. Specifies that this instance group contains preemptible // instances. - bool is_preemptible = 6; + bool is_preemptible = 6 [(google.api.field_behavior) = OPTIONAL]; // Output only. The config for Compute Engine Instance Group // Manager that manages this group. // This is only used for preemptible instance groups. - ManagedGroupConfig managed_group_config = 7; + ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine accelerator configuration for these // instances. - // - // **Beta Feature**: This feature is still under development. It may be - // changed before final release. - repeated AcceleratorConfig accelerators = 8; + repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Specifies the minimum cpu platform for the Instance Group. + // See [Cloud Dataproc→Minimum CPU Platform] + // (/dataproc/docs/concepts/compute/dataproc-min-cpu). 
+ string min_cpu_platform = 9 [(google.api.field_behavior) = OPTIONAL]; } // Specifies the resources used to actively manage an instance group. message ManagedGroupConfig { // Output only. The name of the Instance Template used for the Managed // Instance Group. - string instance_template_name = 1; + string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the Instance Group Manager for this group. - string instance_group_manager_name = 2; + string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Specifies the type and number of accelerator cards attached to the instances @@ -356,10 +416,10 @@ message DiskConfig { // Optional. Type of the boot disk (default is "pd-standard"). // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or // "pd-standard" (Persistent Disk Hard Disk Drive). - string boot_disk_type = 3; + string boot_disk_type = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Size in GB of the boot disk (default is 500GB). - int32 boot_disk_size_gb = 1; + int32 boot_disk_size_gb = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. Number of attached SSDs, from 0 to 4 (default is 0). // If SSDs are not attached, the boot disk is used to store runtime logs and @@ -367,20 +427,20 @@ message DiskConfig { // If one or more SSDs are attached, this runtime bulk // data is spread across them, and the boot disk contains only basic // config and installed binaries. - int32 num_local_ssds = 2; + int32 num_local_ssds = 2 [(google.api.field_behavior) = OPTIONAL]; } // Specifies an executable to run on a fully configured node and a // timeout period for executable completion. message NodeInitializationAction { // Required. Cloud Storage URI of executable file. - string executable_file = 1; + string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Amount of time executable has to complete. Default is // 10 minutes. 
Cluster creation fails with an explanatory error message (the // name of the executable that caused the error and the exceeded timeout // period) if the executable is not completed at end of the timeout period. - google.protobuf.Duration execution_timeout = 2; + google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; } // The status of a cluster and its instances. @@ -426,17 +486,95 @@ message ClusterStatus { } // Output only. The cluster's state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Optional details of cluster's state. - string detail = 2; + // Optional. Output only. Details of cluster's state. + string detail = 2 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; // Output only. Time when this state was entered. - google.protobuf.Timestamp state_start_time = 3; + google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information that includes // status reported by the agent. - Substate substate = 4; + Substate substate = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Security related configuration, including Kerberos. +message SecurityConfig { + // Kerberos related configuration. + KerberosConfig kerberos_config = 1; +} + +// Specifies Kerberos related configuration. +message KerberosConfig { + // Optional. Flag to indicate whether to Kerberize the cluster. + bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The Cloud Storage URI of a KMS encrypted file containing the root + // principal password. + string root_principal_password_uri = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The uri of the KMS key used to encrypt various sensitive + // files. + string kms_key_uri = 3 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The Cloud Storage URI of the keystore file used for SSL + // encryption. 
If not provided, Dataproc will provide a self-signed + // certificate. + string keystore_uri = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of the truststore file used for SSL + // encryption. If not provided, Dataproc will provide a self-signed + // certificate. + string truststore_uri = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided keystore. For the self-signed certificate, + // this password is generated by Dataproc. + string keystore_password_uri = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided key. For the self-signed certificate, this + // password is generated by Dataproc. + string key_password_uri = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided truststore. For the self-signed certificate, + // this password is generated by Dataproc. + string truststore_password_uri = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The remote realm the Dataproc on-cluster KDC will trust, should + // the user enable cross realm trust. + string cross_realm_trust_realm = 9 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross + // realm trust relationship. + string cross_realm_trust_kdc = 10 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The admin server (IP or hostname) for the remote trusted realm in + // a cross realm trust relationship. + string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. 
The Cloud Storage URI of a KMS encrypted file containing the + // shared password between the on-cluster Kerberos realm and the remote + // trusted realm, in a cross realm trust relationship. + string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // master key of the KDC database. + string kdc_db_key_uri = 13 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The lifetime of the ticket granting ticket, in hours. + // If not specified, or user specifies 0, then default value 10 + // will be used. + int32 tgt_lifetime_hours = 14 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The name of the on-cluster Kerberos realm. + // If not specified, the uppercased domain of hostnames will be the realm. + string realm = 15 [(google.api.field_behavior) = OPTIONAL]; } // Specifies the selection and config of software inside the cluster. @@ -448,7 +586,7 @@ message SoftwareConfig { // ["preview" // version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). // If unspecified, it defaults to the latest Debian version. - string image_version = 1; + string image_version = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The properties to set on daemon config files. // @@ -468,10 +606,10 @@ message SoftwareConfig { // // For more information, see // [Cluster properties](/dataproc/docs/concepts/cluster-properties). - map properties = 2; + map properties = 2 [(google.api.field_behavior) = OPTIONAL]; - // The set of optional components to activate on the cluster. - repeated Component optional_components = 3; + // Optional. The set of components to activate on the cluster. + repeated Component optional_components = 3 [(google.api.field_behavior) = OPTIONAL]; } // Contains cluster daemon metrics, such as HDFS and YARN stats. @@ -490,13 +628,13 @@ message ClusterMetrics { message CreateClusterRequest { // Required. 
The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster to create. - Cluster cluster = 2; + Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests with the same @@ -509,23 +647,23 @@ message CreateClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4; + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a cluster. message UpdateClusterRequest { // Required. The ID of the Google Cloud Platform project the // cluster belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 5; + string region = 5 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The changes to the cluster. - Cluster cluster = 3; + Cluster cluster = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. Timeout for graceful YARN decomissioning. Graceful // decommissioning allows removing nodes from the cluster without @@ -535,7 +673,7 @@ message UpdateClusterRequest { // the maximum allowed timeout is 1 day. // // Only supported on Dataproc image versions 1.2 and higher. 
- google.protobuf.Duration graceful_decommission_timeout = 6; + google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; // Required. Specifies the path, relative to `Cluster`, of // the field to update. For example, to change the number of workers @@ -582,9 +720,13 @@ message UpdateClusterRequest { // config.secondary_worker_config.num_instances // Resize secondary worker group // + // + // config.autoscaling_config.policy_uriUse, stop using, or + // change autoscaling policies + // // // - google.protobuf.FieldMask update_mask = 4; + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same @@ -597,24 +739,24 @@ message UpdateClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 7; + string request_id = 7 [(google.api.field_behavior) = OPTIONAL]; } // A request to delete a cluster. message DeleteClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. Specifying the `cluster_uuid` means the RPC should fail // (with error NOT_FOUND) if cluster with specified UUID does not exist. - string cluster_uuid = 4; + string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A unique id used to identify the request. 
If the server // receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same @@ -627,30 +769,30 @@ message DeleteClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5; + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; } // Request to get the resource representation for a cluster in a project. message GetClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to list the clusters in a project. message ListClustersRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 4; + string region = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. A filter constraining the clusters to list. Filters are // case-sensitive and have the following syntax: @@ -671,37 +813,37 @@ message ListClustersRequest { // // status.state = ACTIVE AND clusterName = mycluster // AND labels.env = staging AND labels.starred = * - string filter = 5; + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The standard List page size. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The standard List page token. 
- string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // The list of all clusters in a project. message ListClustersResponse { // Output only. The clusters in the project. - repeated Cluster clusters = 1; + repeated Cluster clusters = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent `ListClustersRequest`. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to collect cluster diagnostic information. message DiagnoseClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; } // The location of diagnostic output. @@ -709,5 +851,5 @@ message DiagnoseClusterResults { // Output only. The Cloud Storage URI of the diagnostic output. // The output report is a plain text file with a summary of collected // diagnostics. 
- string output_uri = 1; + string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py index a5f3e98dd301..ca9065f58faa 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py +++ b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.dataproc_v1.proto import ( operations_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_operations__pb2, ) @@ -38,10 +40,12 @@ "\n\034com.google.cloud.dataproc.v1B\rClustersProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a/google/cloud/dataproc_v1/proto/operations.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xa5\x03\n\x07\x43luster\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfig\x12=\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntry\x12\x37\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatus\x12?\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatus\x12\x14\n\x0c\x63luster_uuid\x18\x06 \x01(\t\x12\x39\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\xa8\x04\n\rClusterConfig\x12\x15\n\rconfig_bucket\x18\x01 \x01(\t\x12\x46\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfig\x12\x44\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfig\x12\x44\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfig\x12N\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfig\x12\x41\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfig\x12R\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationAction\x12\x45\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfig"/\n\x10\x45ncryptionConfig\x12\x1b\n\x13gce_pd_kms_key_name\x18\x01 \x01(\t"\xaf\x02\n\x10GceClusterConfig\x12\x10\n\x08zone_uri\x18\x01 \x01(\t\x12\x13\n\x0bnetwork_uri\x18\x02 \x01(\t\x12\x16\n\x0esubnetwork_uri\x18\x06 \x01(\t\x12\x18\n\x10internal_ip_only\x18\x07 \x01(\x08\x12\x17\n\x0fservice_account\x18\x08 \x01(\t\x12\x1e\n\x16service_account_scopes\x18\x03 \x03(\t\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd3\x02\n\x13InstanceGroupConfig\x12\x15\n\rnum_instances\x18\x01 \x01(\x05\x12\x16\n\x0einstance_names\x18\x02 \x03(\t\x12\x11\n\timage_uri\x18\x03 \x01(\t\x12\x18\n\x10machine_type_uri\x18\x04 \x01(\t\x12\x39\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfig\x12\x16\n\x0eis_preemptible\x18\x06 \x01(\x08\x12J\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfig\x12\x41\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfig"Y\n\x12ManagedGroupConfig\x12\x1e\n\x16instance_template_name\x18\x01 
\x01(\t\x12#\n\x1binstance_group_manager_name\x18\x02 \x01(\t"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"W\n\nDiskConfig\x12\x16\n\x0e\x62oot_disk_type\x18\x03 \x01(\t\x12\x19\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"i\n\x18NodeInitializationAction\x12\x17\n\x0f\x65xecutable_file\x18\x01 \x01(\t\x12\x34\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xed\x02\n\rClusterStatus\x12<\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.State\x12\x0e\n\x06\x64\x65tail\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x42\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.Substate"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xea\x01\n\x0eSoftwareConfig\x12\x15\n\rimage_version\x18\x01 \x01(\t\x12L\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntry\x12@\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\x03:\x02\x38\x01"\x82\x01\n\x14\x43reateClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x32\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.Cluster\x12\x12\n\nrequest_id\x18\x04 \x01(\t"\x8b\x02\n\x14UpdateClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x05 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x32\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.Cluster\x12@\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x12\n\nrequest_id\x18\x07 \x01(\t"z\n\x14\x44\x65leteClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x04 \x01(\t\x12\x12\n\nrequest_id\x18\x05 \x01(\t"M\n\x11GetClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t"p\n\x13ListClustersRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"d\n\x14ListClustersResponse\x12\x33\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.Cluster\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"R\n\x16\x44iagnoseClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t",\n\x16\x44iagnoseClusterResults\x12\x12\n\noutput_uri\x18\x01 
\x01(\t2\xb2\x08\n\x11\x43lusterController\x12\xa4\x01\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"D\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\x12\xb3\x01\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"S\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\x12\xaa\x01\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"J\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\x12\xa8\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"J\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\x12\xaa\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse";\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\x12\xba\x01\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"V\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*Bq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + '\n-google/cloud/dataproc_v1/proto/clusters.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a/google/cloud/dataproc_v1/proto/operations.proto\x1a+google/cloud/dataproc_v1/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xb9\x03\n\x07\x43luster\x12\x12\n\nproject_id\x18\x01 
\x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfig\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32-.google.cloud.dataproc.v1.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12<\n\x06status\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32\'.google.cloud.dataproc.v1.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x07metrics\x18\t \x01(\x0b\x32(.google.cloud.dataproc.v1.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe6\x05\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32*.google.cloud.dataproc.v1.GceClusterConfigB\x03\xe0\x41\x01\x12I\n\rmaster_config\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12I\n\rworker_config\x18\n \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32-.google.cloud.dataproc.v1.InstanceGroupConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32(.google.cloud.dataproc.v1.SoftwareConfigB\x03\xe0\x41\x01\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x32.google.cloud.dataproc.v1.NodeInitializationActionB\x03\xe0\x41\x01\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32*.google.cloud.dataproc.v1.EncryptionConfigB\x03\xe0\x41\x01\x12L\n\x12\x61utoscaling_config\x18\x12 \x01(\x0b\x32+.google.cloud.dataproc.v1.AutoscalingConfigB\x03\xe0\x41\x01\x12\x46\n\x0fsecurity_config\x18\x10 \x01(\x0b\x32(.google.cloud.dataproc.v1.SecurityConfigB\x03\xe0\x41\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 
\x01(\tB\x03\xe0\x41\x01"\xcd\x02\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12J\n\x08metadata\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.GceClusterConfig.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1.ManagedGroupConfigB\x03\xe0\x41\x03\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32+.google.cloud.dataproc.v1.AcceleratorConfigB\x03\xe0\x41\x01\x12\x1d\n\x10min_cpu_platform\x18\t \x01(\tB\x03\xe0\x41\x01"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"f\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0enum_local_ssds\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x84\x03\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32-.google.cloud.dataproc.v1.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x16\n\x06\x64\x65tail\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"S\n\x0eSecurityConfig\x12\x41\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e 
\x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"\xf9\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12Q\n\nproperties\x18\x02 \x03(\x0b\x32\x38.google.cloud.dataproc.v1.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32#.google.cloud.dataproc.v1.ComponentB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x9a\x02\n\x0e\x43lusterMetrics\x12O\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.HdfsMetricsEntry\x12O\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32\x39.google.cloud.dataproc.v1.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x96\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xae\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 
\x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"n\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32!.google.cloud.dataproc.v1.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 
\x01(\tB\x03\xe0\x41\x03\x32\xae\x0c\n\x11\x43lusterController\x12\x80\x02\n\rCreateCluster\x12..google.cloud.dataproc.v1.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\x9f\x01\x82\xd3\xe4\x93\x02>"3/v1/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x19project_id,region,cluster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xf3\x01\n\rUpdateCluster\x12..google.cloud.dataproc.v1.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\x92\x01\x82\xd3\xe4\x93\x02M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xca\x41<\n\x07\x43luster\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\x99\x02\n\rDeleteCluster\x12..google.cloud.dataproc.v1.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xb8\x01\x82\xd3\xe4\x93\x02\x44*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\xca\x41J\n\x15google.protobuf.Empty\x12\x31google.cloud.dataproc.v1.ClusterOperationMetadata\x12\xc9\x01\n\nGetCluster\x12+.google.cloud.dataproc.v1.GetClusterRequest\x1a!.google.cloud.dataproc.v1.Cluster"k\x82\xd3\xe4\x93\x02\x44\x12\x42/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41\x1eproject_id,region,cluster_name\x12\xd9\x01\n\x0cListClusters\x12-.google.cloud.dataproc.v1.ListClustersRequest\x1a..google.cloud.dataproc.v1.ListClustersResponse"j\x82\xd3\xe4\x93\x02\x35\x12\x33/v1/projects/{project_id}/regions/{region}/clusters\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x8e\x02\n\x0f\x44iagnoseCluster\x12\x30.google.cloud.dataproc.v1.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xa9\x01\x82\xd3\xe4\x93\x02P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41\x1eproject_id,region,cluster_name\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x44iagnoseClusterResults\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\
x41.https://www.googleapis.com/auth/cloud-platformBq\n\x1c\x63om.google.cloud.dataproc.v1B\rClustersProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1_dot_proto_dot_operations__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1_dot_proto_dot_shared__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, @@ -79,8 +83,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2594, - serialized_end=2680, + serialized_start=3072, + serialized_end=3158, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) @@ -102,8 +106,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2682, - serialized_end=2742, + serialized_start=3160, + serialized_end=3220, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) @@ -160,8 +164,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=712, - serialized_end=757, + serialized_start=790, + serialized_end=835, ) _CLUSTER = _descriptor.Descriptor( @@ -240,7 +244,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -258,7 +262,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -276,7 +280,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -294,7 +298,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -324,8 
+328,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=336, - serialized_end=757, + serialized_start=394, + serialized_end=835, ) @@ -351,7 +355,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -369,7 +373,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -387,7 +391,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -405,7 +409,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -423,7 +427,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -441,7 +445,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -459,7 +463,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -477,7 +481,43 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="autoscaling_config", + full_name="google.cloud.dataproc.v1.ClusterConfig.autoscaling_config", + index=8, + number=18, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="security_config", + full_name="google.cloud.dataproc.v1.ClusterConfig.security_config", + index=9, + number=16, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -489,8 +529,47 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=760, - serialized_end=1312, + serialized_start=838, + serialized_end=1580, +) + + +_AUTOSCALINGCONFIG = _descriptor.Descriptor( + name="AutoscalingConfig", + full_name="google.cloud.dataproc.v1.AutoscalingConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="policy_uri", + full_name="google.cloud.dataproc.v1.AutoscalingConfig.policy_uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1582, + serialized_end=1626, ) @@ -516,7 +595,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -528,8 +607,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1314, - serialized_end=1361, + serialized_start=1628, + serialized_end=1680, ) @@ -585,8 +664,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1620, - serialized_end=1667, + 
serialized_start=1969, + serialized_end=2016, ) _GCECLUSTERCONFIG = _descriptor.Descriptor( @@ -611,7 +690,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -629,7 +708,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -647,7 +726,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -665,7 +744,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -683,7 +762,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -701,7 +780,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -749,8 +828,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1364, - serialized_end=1667, + serialized_start=1683, + serialized_end=2016, ) @@ -776,7 +855,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -794,7 +873,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -812,7 +891,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -830,7 +909,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -848,7 +927,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -866,7 +945,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -884,7 +963,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -902,7 +981,25 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="min_cpu_platform", + full_name="google.cloud.dataproc.v1.InstanceGroupConfig.min_cpu_platform", + index=8, + number=9, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -914,8 +1011,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1670, - serialized_end=2009, + serialized_start=2019, + serialized_end=2429, ) @@ -941,7 +1038,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -959,7 +1056,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -971,8 
+1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2011, - serialized_end=2100, + serialized_start=2431, + serialized_end=2530, ) @@ -1028,8 +1125,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2102, - serialized_end=2178, + serialized_start=2532, + serialized_end=2608, ) @@ -1055,7 +1152,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1073,7 +1170,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1091,7 +1188,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1103,8 +1200,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2180, - serialized_end=2267, + serialized_start=2610, + serialized_end=2712, ) @@ -1130,7 +1227,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1148,7 +1245,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1160,8 +1257,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2269, - serialized_end=2374, + serialized_start=2714, + serialized_end=2829, ) @@ -1187,7 +1284,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1205,7 +1302,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -1223,7 +1320,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1241,7 +1338,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1253,8 +1350,338 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2377, - serialized_end=2742, + serialized_start=2832, + serialized_end=3220, +) + + +_SECURITYCONFIG = _descriptor.Descriptor( + name="SecurityConfig", + full_name="google.cloud.dataproc.v1.SecurityConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="kerberos_config", + full_name="google.cloud.dataproc.v1.SecurityConfig.kerberos_config", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3222, + serialized_end=3305, +) + + +_KERBEROSCONFIG = _descriptor.Descriptor( + name="KerberosConfig", + full_name="google.cloud.dataproc.v1.KerberosConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="enable_kerberos", + full_name="google.cloud.dataproc.v1.KerberosConfig.enable_kerberos", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="root_principal_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.root_principal_password_uri", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="kms_key_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.kms_key_uri", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="keystore_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.keystore_uri", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="truststore_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.truststore_uri", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="keystore_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.keystore_password_uri", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="key_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.key_password_uri", + index=6, + number=7, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="truststore_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.truststore_password_uri", + index=7, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cross_realm_trust_realm", + full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_realm", + index=8, + number=9, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cross_realm_trust_kdc", + full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_kdc", + index=9, + number=10, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cross_realm_trust_admin_server", + 
full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_admin_server", + index=10, + number=11, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cross_realm_trust_shared_password_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.cross_realm_trust_shared_password_uri", + index=11, + number=12, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="kdc_db_key_uri", + full_name="google.cloud.dataproc.v1.KerberosConfig.kdc_db_key_uri", + index=12, + number=13, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="tgt_lifetime_hours", + full_name="google.cloud.dataproc.v1.KerberosConfig.tgt_lifetime_hours", + index=13, + number=14, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="realm", + full_name="google.cloud.dataproc.v1.KerberosConfig.realm", + index=14, + number=15, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3308, + serialized_end=3836, ) @@ -1310,8 +1737,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2930, - serialized_end=2979, + serialized_start=4039, + serialized_end=4088, ) _SOFTWARECONFIG = _descriptor.Descriptor( @@ -1336,7 +1763,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1354,7 +1781,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1372,7 +1799,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1384,8 +1811,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2745, - serialized_end=2979, + serialized_start=3839, + serialized_end=4088, ) @@ -1441,8 +1868,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3162, - serialized_end=3212, + serialized_start=4271, + serialized_end=4321, ) _CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( @@ -1497,8 +1924,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3214, - serialized_end=3264, + serialized_start=4323, + serialized_end=4373, ) _CLUSTERMETRICS = _descriptor.Descriptor( @@ -1553,8 +1980,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2982, - serialized_end=3264, + serialized_start=4091, + serialized_end=4373, ) @@ -1580,7 +2007,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1598,7 +2025,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1616,7 +2043,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1634,7 +2061,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1646,8 +2073,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3267, - serialized_end=3397, + serialized_start=4376, + serialized_end=4526, ) @@ -1673,7 +2100,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1691,7 +2118,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1709,7 +2136,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1727,7 +2154,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1745,7 +2172,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1763,7 +2190,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -1781,7 +2208,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1793,8 +2220,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3400, - serialized_end=3667, + serialized_start=4529, + serialized_end=4831, ) @@ -1820,7 +2247,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1838,7 +2265,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1856,7 +2283,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1874,7 +2301,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1892,7 +2319,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1904,8 +2331,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3669, - serialized_end=3791, + serialized_start=4834, + serialized_end=4981, ) @@ -1931,7 +2358,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1949,7 +2376,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1967,7 +2394,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1979,8 +2406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3793, - serialized_end=3870, + serialized_start=4983, + serialized_end=5075, ) @@ -2006,7 +2433,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2024,7 +2451,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2042,7 +2469,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2060,7 +2487,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2078,7 +2505,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2090,8 +2517,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3872, - serialized_end=3984, + serialized_start=5078, + serialized_end=5215, ) @@ -2117,7 +2544,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2135,7 +2562,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2147,8 +2574,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3986, - serialized_end=4086, + serialized_start=5217, + serialized_end=5327, ) @@ -2174,7 +2601,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2192,7 +2619,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2210,7 +2637,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2222,8 +2649,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4088, - serialized_end=4170, + serialized_start=5329, + serialized_end=5426, ) @@ -2249,7 +2676,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ) ], @@ -2261,8 +2688,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4172, - serialized_end=4216, + serialized_start=5428, + serialized_end=5477, ) _CLUSTER_LABELSENTRY.containing_type = _CLUSTER @@ -2282,6 +2709,8 @@ "initialization_actions" ].message_type = _NODEINITIALIZATIONACTION _CLUSTERCONFIG.fields_by_name["encryption_config"].message_type = _ENCRYPTIONCONFIG +_CLUSTERCONFIG.fields_by_name["autoscaling_config"].message_type = _AUTOSCALINGCONFIG +_CLUSTERCONFIG.fields_by_name["security_config"].message_type = _SECURITYCONFIG _GCECLUSTERCONFIG_METADATAENTRY.containing_type = _GCECLUSTERCONFIG _GCECLUSTERCONFIG.fields_by_name[ "metadata" @@ -2301,6 +2730,7 @@ _CLUSTERSTATUS.fields_by_name["substate"].enum_type = _CLUSTERSTATUS_SUBSTATE _CLUSTERSTATUS_STATE.containing_type = _CLUSTERSTATUS _CLUSTERSTATUS_SUBSTATE.containing_type = _CLUSTERSTATUS +_SECURITYCONFIG.fields_by_name["kerberos_config"].message_type = _KERBEROSCONFIG _SOFTWARECONFIG_PROPERTIESENTRY.containing_type = _SOFTWARECONFIG _SOFTWARECONFIG.fields_by_name[ "properties" @@ -2327,6 +2757,7 @@ 
_LISTCLUSTERSRESPONSE.fields_by_name["clusters"].message_type = _CLUSTER DESCRIPTOR.message_types_by_name["Cluster"] = _CLUSTER DESCRIPTOR.message_types_by_name["ClusterConfig"] = _CLUSTERCONFIG +DESCRIPTOR.message_types_by_name["AutoscalingConfig"] = _AUTOSCALINGCONFIG DESCRIPTOR.message_types_by_name["EncryptionConfig"] = _ENCRYPTIONCONFIG DESCRIPTOR.message_types_by_name["GceClusterConfig"] = _GCECLUSTERCONFIG DESCRIPTOR.message_types_by_name["InstanceGroupConfig"] = _INSTANCEGROUPCONFIG @@ -2335,6 +2766,8 @@ DESCRIPTOR.message_types_by_name["DiskConfig"] = _DISKCONFIG DESCRIPTOR.message_types_by_name["NodeInitializationAction"] = _NODEINITIALIZATIONACTION DESCRIPTOR.message_types_by_name["ClusterStatus"] = _CLUSTERSTATUS +DESCRIPTOR.message_types_by_name["SecurityConfig"] = _SECURITYCONFIG +DESCRIPTOR.message_types_by_name["KerberosConfig"] = _KERBEROSCONFIG DESCRIPTOR.message_types_by_name["SoftwareConfig"] = _SOFTWARECONFIG DESCRIPTOR.message_types_by_name["ClusterMetrics"] = _CLUSTERMETRICS DESCRIPTOR.message_types_by_name["CreateClusterRequest"] = _CREATECLUSTERREQUEST @@ -2450,12 +2883,42 @@ else ... worker specific actions ... fi encryption_config: Optional. Encryption settings for the cluster. + autoscaling_config: + Optional. Autoscaling config for the policy associated with + the cluster. Cluster does not autoscale if this field is + unset. + security_config: + Optional. Security settings for the cluster. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.ClusterConfig) ), ) _sym_db.RegisterMessage(ClusterConfig) +AutoscalingConfig = _reflection.GeneratedProtocolMessageType( + "AutoscalingConfig", + (_message.Message,), + dict( + DESCRIPTOR=_AUTOSCALINGCONFIG, + __module__="google.cloud.dataproc_v1.proto.clusters_pb2", + __doc__="""Autoscaling Policy config associated with the cluster. + + + Attributes: + policy_uri: + Optional. The autoscaling policy used by the cluster. 
Only + resource names including projectid and location (region) are + valid. Examples: - ``https://www.googleapis.com/compute/v1/p + rojects/[project_id]/locations/[dataproc_region]/autoscalingPo + licies/[policy_id]`` - ``projects/[project_id]/locations/[dat + aproc_region]/autoscalingPolicies/[policy_id]`` Note that the + policy must be in the same project and Cloud Dataproc region. + """, + # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.AutoscalingConfig) + ), +) +_sym_db.RegisterMessage(AutoscalingConfig) + EncryptionConfig = _reflection.GeneratedProtocolMessageType( "EncryptionConfig", (_message.Message,), @@ -2614,8 +3077,11 @@ preemptible instance groups. accelerators: Optional. The Compute Engine accelerator configuration for - these instances. **Beta Feature**: This feature is still - under development. It may be changed before final release. + these instances. + min_cpu_platform: + Optional. Specifies the minimum cpu platform for the Instance + Group. See [Cloud Dataproc→Minimum CPU Platform] + (/dataproc/docs/concepts/compute/dataproc-min-cpu). """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstanceGroupConfig) ), @@ -2748,7 +3214,7 @@ state: Output only. The cluster's state. detail: - Output only. Optional details of cluster's state. + Optional. Output only. Details of cluster's state. state_start_time: Output only. Time when this state was entered. substate: @@ -2760,6 +3226,96 @@ ) _sym_db.RegisterMessage(ClusterStatus) +SecurityConfig = _reflection.GeneratedProtocolMessageType( + "SecurityConfig", + (_message.Message,), + dict( + DESCRIPTOR=_SECURITYCONFIG, + __module__="google.cloud.dataproc_v1.proto.clusters_pb2", + __doc__="""Security related configuration, including Kerberos. + + + Attributes: + kerberos_config: + Kerberos related configuration. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SecurityConfig) + ), +) +_sym_db.RegisterMessage(SecurityConfig) + +KerberosConfig = _reflection.GeneratedProtocolMessageType( + "KerberosConfig", + (_message.Message,), + dict( + DESCRIPTOR=_KERBEROSCONFIG, + __module__="google.cloud.dataproc_v1.proto.clusters_pb2", + __doc__="""Specifies Kerberos related configuration. + + + Attributes: + enable_kerberos: + Optional. Flag to indicate whether to Kerberize the cluster. + root_principal_password_uri: + Required. The Cloud Storage URI of a KMS encrypted file + containing the root principal password. + kms_key_uri: + Required. The uri of the KMS key used to encrypt various + sensitive files. + keystore_uri: + Optional. The Cloud Storage URI of the keystore file used for + SSL encryption. If not provided, Dataproc will provide a self- + signed certificate. + truststore_uri: + Optional. The Cloud Storage URI of the truststore file used + for SSL encryption. If not provided, Dataproc will provide a + self-signed certificate. + keystore_password_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the password to the user provided keystore. For the + self-signed certificate, this password is generated by + Dataproc. + key_password_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the password to the user provided key. For the + self-signed certificate, this password is generated by + Dataproc. + truststore_password_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the password to the user provided truststore. For + the self-signed certificate, this password is generated by + Dataproc. + cross_realm_trust_realm: + Optional. The remote realm the Dataproc on-cluster KDC will + trust, should the user enable cross realm trust. + cross_realm_trust_kdc: + Optional. The KDC (IP or hostname) for the remote trusted + realm in a cross realm trust relationship. 
+ cross_realm_trust_admin_server: + Optional. The admin server (IP or hostname) for the remote + trusted realm in a cross realm trust relationship. + cross_realm_trust_shared_password_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the shared password between the on-cluster Kerberos + realm and the remote trusted realm, in a cross realm trust + relationship. + kdc_db_key_uri: + Optional. The Cloud Storage URI of a KMS encrypted file + containing the master key of the KDC database. + tgt_lifetime_hours: + Optional. The lifetime of the ticket granting ticket, in + hours. If not specified, or user specifies 0, then default + value 10 will be used. + realm: + Optional. The name of the on-cluster Kerberos realm. If not + specified, the uppercased domain of hostnames will be the + realm. + """, + # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.KerberosConfig) + ), +) +_sym_db.RegisterMessage(KerberosConfig) + SoftwareConfig = _reflection.GeneratedProtocolMessageType( "SoftwareConfig", (_message.Message,), @@ -2801,7 +3357,7 @@ ``yarn-site.xml`` For more information, see `Cluster properties `__. optional_components: - The set of optional components to activate on the cluster. + Optional. The set of components to activate on the cluster. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.SoftwareConfig) ), @@ -2946,7 +3502,11 @@ config.secondary\_worker\_config.num\_instances .. raw:: html .. raw:: html Resize secondary worker group .. raw:: html .. raw:: html .. raw:: - html .. raw:: html + html .. raw:: html + config.autoscaling\_config.policy\_uri .. raw:: html + .. raw:: html Use, stop using, or change + autoscaling policies .. raw:: html .. raw:: html + .. raw:: html .. raw:: html request_id: Optional. A unique id used to identify the request. 
If the server receives two [UpdateClusterRequest][google.cloud.datapr @@ -3142,19 +3702,111 @@ DESCRIPTOR._options = None _CLUSTER_LABELSENTRY._options = None +_CLUSTER.fields_by_name["labels"]._options = None +_CLUSTER.fields_by_name["status"]._options = None +_CLUSTER.fields_by_name["status_history"]._options = None +_CLUSTER.fields_by_name["cluster_uuid"]._options = None +_CLUSTERCONFIG.fields_by_name["config_bucket"]._options = None +_CLUSTERCONFIG.fields_by_name["gce_cluster_config"]._options = None +_CLUSTERCONFIG.fields_by_name["master_config"]._options = None +_CLUSTERCONFIG.fields_by_name["worker_config"]._options = None +_CLUSTERCONFIG.fields_by_name["secondary_worker_config"]._options = None +_CLUSTERCONFIG.fields_by_name["software_config"]._options = None +_CLUSTERCONFIG.fields_by_name["initialization_actions"]._options = None +_CLUSTERCONFIG.fields_by_name["encryption_config"]._options = None +_CLUSTERCONFIG.fields_by_name["autoscaling_config"]._options = None +_CLUSTERCONFIG.fields_by_name["security_config"]._options = None +_AUTOSCALINGCONFIG.fields_by_name["policy_uri"]._options = None +_ENCRYPTIONCONFIG.fields_by_name["gce_pd_kms_key_name"]._options = None _GCECLUSTERCONFIG_METADATAENTRY._options = None +_GCECLUSTERCONFIG.fields_by_name["zone_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["network_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["subnetwork_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["internal_ip_only"]._options = None +_GCECLUSTERCONFIG.fields_by_name["service_account"]._options = None +_GCECLUSTERCONFIG.fields_by_name["service_account_scopes"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["num_instances"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["instance_names"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["image_uri"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["machine_type_uri"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["disk_config"]._options = None 
+_INSTANCEGROUPCONFIG.fields_by_name["is_preemptible"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["managed_group_config"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["accelerators"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["min_cpu_platform"]._options = None +_MANAGEDGROUPCONFIG.fields_by_name["instance_template_name"]._options = None +_MANAGEDGROUPCONFIG.fields_by_name["instance_group_manager_name"]._options = None +_DISKCONFIG.fields_by_name["boot_disk_type"]._options = None +_DISKCONFIG.fields_by_name["boot_disk_size_gb"]._options = None +_DISKCONFIG.fields_by_name["num_local_ssds"]._options = None +_NODEINITIALIZATIONACTION.fields_by_name["executable_file"]._options = None +_NODEINITIALIZATIONACTION.fields_by_name["execution_timeout"]._options = None +_CLUSTERSTATUS.fields_by_name["state"]._options = None +_CLUSTERSTATUS.fields_by_name["detail"]._options = None +_CLUSTERSTATUS.fields_by_name["state_start_time"]._options = None +_CLUSTERSTATUS.fields_by_name["substate"]._options = None +_KERBEROSCONFIG.fields_by_name["enable_kerberos"]._options = None +_KERBEROSCONFIG.fields_by_name["root_principal_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["kms_key_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["keystore_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["truststore_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["keystore_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["key_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["truststore_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_realm"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_kdc"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_admin_server"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_shared_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["kdc_db_key_uri"]._options = None 
+_KERBEROSCONFIG.fields_by_name["tgt_lifetime_hours"]._options = None +_KERBEROSCONFIG.fields_by_name["realm"]._options = None _SOFTWARECONFIG_PROPERTIESENTRY._options = None +_SOFTWARECONFIG.fields_by_name["image_version"]._options = None +_SOFTWARECONFIG.fields_by_name["properties"]._options = None +_SOFTWARECONFIG.fields_by_name["optional_components"]._options = None _CLUSTERMETRICS_HDFSMETRICSENTRY._options = None _CLUSTERMETRICS_YARNMETRICSENTRY._options = None +_CREATECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["region"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["cluster"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["region"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["cluster"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["graceful_decommission_timeout"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["update_mask"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["region"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["cluster_uuid"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_GETCLUSTERREQUEST.fields_by_name["project_id"]._options = None +_GETCLUSTERREQUEST.fields_by_name["region"]._options = None +_GETCLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["project_id"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["region"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["filter"]._options = None 
+_LISTCLUSTERSREQUEST.fields_by_name["page_size"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["page_token"]._options = None +_LISTCLUSTERSRESPONSE.fields_by_name["clusters"]._options = None +_LISTCLUSTERSRESPONSE.fields_by_name["next_page_token"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["region"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_DIAGNOSECLUSTERRESULTS.fields_by_name["output_uri"]._options = None _CLUSTERCONTROLLER = _descriptor.ServiceDescriptor( name="ClusterController", full_name="google.cloud.dataproc.v1.ClusterController", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=4219, - serialized_end=5293, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=5480, + serialized_end=7062, methods=[ _descriptor.MethodDescriptor( name="CreateCluster", @@ -3164,7 +3816,7 @@ input_type=_CREATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002>"3/v1/projects/{project_id}/regions/{region}/clusters:\007cluster' + '\202\323\344\223\002>"3/v1/projects/{project_id}/regions/{region}/clusters:\007cluster\332A\031project_id,region,cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata' ), ), _descriptor.MethodDescriptor( @@ -3175,7 +3827,7 @@ input_type=_UPDATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster" + "\202\323\344\223\002M2B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\312A<\n\007Cluster\0221google.cloud.dataproc.v1.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( @@ -3186,7 +3838,7 @@ 
input_type=_DELETECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002D*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + "\202\323\344\223\002D*B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A\036project_id,region,cluster_name\312AJ\n\025google.protobuf.Empty\0221google.cloud.dataproc.v1.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( @@ -3197,7 +3849,7 @@ input_type=_GETCLUSTERREQUEST, output_type=_CLUSTER, serialized_options=_b( - "\202\323\344\223\002D\022B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + "\202\323\344\223\002D\022B/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A\036project_id,region,cluster_name" ), ), _descriptor.MethodDescriptor( @@ -3208,7 +3860,7 @@ input_type=_LISTCLUSTERSREQUEST, output_type=_LISTCLUSTERSRESPONSE, serialized_options=_b( - "\202\323\344\223\0025\0223/v1/projects/{project_id}/regions/{region}/clusters" + "\202\323\344\223\0025\0223/v1/projects/{project_id}/regions/{region}/clusters\332A\021project_id,region\332A\030project_id,region,filter" ), ), _descriptor.MethodDescriptor( @@ -3219,7 +3871,7 @@ input_type=_DIAGNOSECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*' + '\202\323\344\223\002P"K/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*\332A\036project_id,region,cluster_name\312A/\n\025google.protobuf.Empty\022\026DiagnoseClusterResults' ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py index 5d4b275b8ab0..def69f148416 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py +++ 
b/dataproc/google/cloud/dataproc_v1/proto/clusters_pb2_grpc.py @@ -58,21 +58,27 @@ class ClusterControllerServicer(object): """ def CreateCluster(self, request, context): - """Creates a cluster in a project. + """Creates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateCluster(self, request, context): - """Updates a cluster in a project. + """Updates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteCluster(self, request, context): - """Deletes a cluster in a project. + """Deletes a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -93,9 +99,13 @@ def ListClusters(self, request, context): raise NotImplementedError("Method not implemented!") def DiagnoseCluster(self, request, context): - """Gets cluster diagnostic information. - After the operation completes, the Operation.response field - contains `DiagnoseClusterOutputLocation`. + """Gets cluster diagnostic information. 
The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + After the operation completes, + [Operation.response][google.longrunning.Operation.response] + contains + [DiagnoseClusterResults](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/dataproc/google/cloud/dataproc_v1/proto/jobs.proto b/dataproc/google/cloud/dataproc_v1/proto/jobs.proto index 7ead7bb95ca0..eeba155deebe 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/jobs.proto +++ b/dataproc/google/cloud/dataproc_v1/proto/jobs.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.dataproc.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; @@ -29,12 +31,16 @@ option java_package = "com.google.cloud.dataproc.v1"; // The JobController provides methods to manage jobs. service JobController { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Submits a job to a cluster. rpc SubmitJob(SubmitJobRequest) returns (Job) { option (google.api.http) = { post: "/v1/projects/{project_id}/regions/{region}/jobs:submit" body: "*" }; + option (google.api.method_signature) = "project_id,region,job"; } // Gets the resource representation for a job in a project. @@ -42,6 +48,7 @@ service JobController { option (google.api.http) = { get: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.method_signature) = "project_id,region,job_id"; } // Lists regions/{region}/jobs in a project. 
@@ -49,6 +56,8 @@ service JobController { option (google.api.http) = { get: "/v1/projects/{project_id}/regions/{region}/jobs" }; + option (google.api.method_signature) = "project_id,region"; + option (google.api.method_signature) = "project_id,region,filter"; } // Updates a job in a project. @@ -69,6 +78,7 @@ service JobController { post: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" body: "*" }; + option (google.api.method_signature) = "project_id,region,job_id"; } // Deletes the job from the project. If the job is active, the delete fails, @@ -77,6 +87,7 @@ service JobController { option (google.api.http) = { delete: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.method_signature) = "project_id,region,job_id"; } } @@ -148,30 +159,30 @@ message HadoopJob { // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as // job properties, since a collision may occur that causes an incorrect job // submission. - repeated string args = 3; + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Jar file URIs to add to the CLASSPATHs of the // Hadoop driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied // to the working directory of Hadoop drivers and distributed tasks. Useful // for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory of // Hadoop drivers and tasks. Supported file types: // .jar, .tar, .tar.gz, .tgz, or .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Hadoop. 
// Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site and // classes in user code. - map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) @@ -194,29 +205,29 @@ message SparkJob { // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may // occur that causes an incorrect job submission. - repeated string args = 3; + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the // Spark driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of files to be copied to the working directory of // Spark drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory // of Spark drivers and tasks. Supported file types: // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Spark. // Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job for running @@ -226,37 +237,37 @@ message SparkJob { message PySparkJob { // Required. The HCFS URI of the main Python file to use as the driver. Must // be a .py file. - string main_python_file_uri = 1; + string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may // occur that causes an incorrect job submission. - repeated string args = 2; + repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS file URIs of Python files to pass to the PySpark // framework. Supported file types: .py, .egg, and .zip. - repeated string python_file_uris = 3; + repeated string python_file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the // Python driver and tasks. - repeated string jar_file_uris = 4; + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of files to be copied to the working directory of // Python drivers and distributed tasks. Useful for naively parallel tasks. - repeated string file_uris = 5; + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of archives to be extracted in the working directory of // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6; + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure PySpark. // Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in // /etc/spark/conf/spark-defaults.conf and classes in user code. 
- map properties = 7; + map properties = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8; + LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; } // A list of queries to run on a cluster. @@ -275,7 +286,7 @@ message QueryList { // ] // } // } - repeated string queries = 1; + repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; } // A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) @@ -294,22 +305,22 @@ message HiveJob { // Optional. Whether to continue executing queries if a query fails. // The default value is `false`. Setting to `true` can be useful when // executing independent parallel queries. - bool continue_on_failure = 3; + bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Mapping of query variable names to values (equivalent to the // Hive command: `SET name="value";`). - map script_variables = 4; + map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names and values, used to configure Hive. // Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, // /etc/hive/conf/hive-site.xml, and classes in user code. - map properties = 5; + map properties = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATH of the // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes // and UDFs. - repeated string jar_file_uris = 6; + repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job for running [Apache Spark @@ -327,18 +338,18 @@ message SparkSqlJob { // Optional. Mapping of query variable names to values (equivalent to the // Spark SQL command: SET `name="value";`). 
- map script_variables = 3; + map script_variables = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure // Spark SQL's SparkConf. Properties that conflict with values set by the // Cloud Dataproc API may be overwritten. - map properties = 4; + map properties = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. - repeated string jar_file_uris = 56; + repeated string jar_file_uris = 56 [(google.api.field_behavior) = OPTIONAL]; // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 6; + LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job for running [Apache Pig](https://pig.apache.org/) @@ -357,34 +368,34 @@ message PigJob { // Optional. Whether to continue executing queries if a query fails. // The default value is `false`. Setting to `true` can be useful when // executing independent parallel queries. - bool continue_on_failure = 3; + bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Mapping of query variable names to values (equivalent to the Pig // command: `name=[value]`). - map script_variables = 4; + map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A mapping of property names to values, used to configure Pig. // Properties that conflict with values set by the Cloud Dataproc API may be // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, // /etc/pig/conf/pig.properties, and classes in user code. - map properties = 5; + map properties = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. HCFS URIs of jar files to add to the CLASSPATH of // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. - repeated string jar_file_uris = 6; + repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
The runtime log config for job execution. - LoggingConfig logging_config = 7; + LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; } // Cloud Dataproc job config. message JobPlacement { // Required. The name of the cluster where the job will be submitted. - string cluster_name = 1; + string cluster_name = 1 [(google.api.field_behavior) = REQUIRED]; // Output only. A cluster UUID generated by the Cloud Dataproc service when // the job is submitted. - string cluster_uuid = 2; + string cluster_uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Cloud Dataproc job status. @@ -453,25 +464,28 @@ message JobStatus { } // Output only. A state message specifying the overall job state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Output only. Optional job state details, such as an error + // Optional. Output only. Job state details, such as an error // description if the state is ERROR. - string details = 2; + string details = 2 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; // Output only. The time when this state was entered. - google.protobuf.Timestamp state_start_time = 6; + google.protobuf.Timestamp state_start_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information, which includes // status reported by the agent. - Substate substate = 7; + Substate substate = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Encapsulates the full scoping used to reference a job. message JobReference { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The job ID, which must be unique within the project. // @@ -479,7 +493,7 @@ message JobReference { // underscores (_), or hyphens (-). The maximum length is 100 characters. 
// // If not specified by the caller, the job ID will be provided by the server. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = OPTIONAL]; } // A YARN application created by a job. Application information is a subset of @@ -520,19 +534,19 @@ message YarnApplication { } // Required. The application name. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The application state. - State state = 2; + State state = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The numerical progress of the application, from 1 to 100. - float progress = 3; + float progress = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or // TimelineServer that provides application-specific information. The URL uses // the internal hostname, and requires a proxy server for resolution and, // possibly, access. - string tracking_url = 4; + string tracking_url = 4 [(google.api.field_behavior) = OPTIONAL]; } // A Cloud Dataproc job resource. @@ -541,11 +555,11 @@ message Job { // obtain the equivalent REST path of the job resource. If this property // is not specified when a job is created, the server generates a // job_id. - JobReference reference = 1; + JobReference reference = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. Job information, including how, when, and where to // run the job. - JobPlacement placement = 2; + JobPlacement placement = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The application/framework-specific portion of the job. oneof type_job { @@ -571,25 +585,25 @@ message Job { // Output only. The job status. Additional application-specific // status information may be contained in the type_job // and yarn_applications fields. - JobStatus status = 8; + JobStatus status = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous job status. 
- repeated JobStatus status_history = 13; + repeated JobStatus status_history = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The collection of YARN applications spun up by this job. // // **Beta** Feature: This report is available for testing purposes only. It // may be changed before final release. - repeated YarnApplication yarn_applications = 9; + repeated YarnApplication yarn_applications = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A URI pointing to the location of the stdout of the job's // driver program. - string driver_output_resource_uri = 17; + string driver_output_resource_uri = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. If present, the location of miscellaneous control files // which may be used as part of job setup and handling. If not present, // control files may be placed in the same location as `driver_output_uri`. - string driver_control_files_uri = 15; + string driver_control_files_uri = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The labels to associate with this job. // Label **keys** must contain 1 to 63 characters, and must conform to @@ -598,15 +612,15 @@ message Job { // characters, and must conform to [RFC // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be // associated with a job. - map labels = 18; + map labels = 18 [(google.api.field_behavior) = OPTIONAL]; // Optional. Job scheduling configuration. - JobScheduling scheduling = 20; + JobScheduling scheduling = 20 [(google.api.field_behavior) = OPTIONAL]; // Output only. A UUID that uniquely identifies a job within the project // over time. This is in contrast to a user-settable reference.job_id that // may be reused over time. - string job_uuid = 22; + string job_uuid = 22 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Job scheduling options. @@ -619,20 +633,20 @@ message JobScheduling { // 4 times within 10 minute window. // // Maximum value is 10. 
- int32 max_failures_per_hour = 1; + int32 max_failures_per_hour = 1 [(google.api.field_behavior) = OPTIONAL]; } // A request to submit a job. message SubmitJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job resource. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same @@ -645,20 +659,20 @@ message SubmitJobRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4; + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // A request to get the resource representation for a job in a project. message GetJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to list jobs in a project. @@ -678,27 +692,27 @@ message ListJobsRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. 
- string region = 6; + string region = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. The number of results to return in each response. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The page token, returned by a previous call, to request the // next page of results. - string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. If set, the returned jobs list includes only jobs that were // submitted to the named cluster. - string cluster_name = 4; + string cluster_name = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Specifies enumerated categories of jobs to list. // (default = match ALL jobs). // // If `filter` is provided, `jobStateMatcher` will be ignored. - JobStateMatcher job_state_matcher = 5; + JobStateMatcher job_state_matcher = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. A filter constraining the jobs to list. Filters are // case-sensitive and have the following syntax: @@ -714,23 +728,23 @@ message ListJobsRequest { // Example filter: // // status.state = ACTIVE AND labels.env = staging AND labels.starred = * - string filter = 7; + string filter = 7 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a job. message UpdateJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 2; + string region = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 3; + string job_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The changes to the job. - Job job = 4; + Job job = 4 [(google.api.field_behavior) = REQUIRED]; // Required. Specifies the path, relative to Job, of // the field to update. 
For example, to update the labels of a Job the @@ -738,42 +752,42 @@ message UpdateJobRequest { // labels, and the `PATCH` request body would specify the new // value. Note: Currently, labels is the only // field that can be updated. - google.protobuf.FieldMask update_mask = 5; + google.protobuf.FieldMask update_mask = 5 [(google.api.field_behavior) = REQUIRED]; } // A list of jobs in a project. message ListJobsResponse { // Output only. Jobs list. - repeated Job jobs = 1; + repeated Job jobs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. This token is included in the response if there are more results // to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent ListJobsRequest. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OPTIONAL]; } // A request to cancel a job. message CancelJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to delete a job. message DeleteJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. 
- string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } diff --git a/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py index 67011adb07f3..294c5acca05e 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py +++ b/dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -29,10 +31,12 @@ "\n\034com.google.cloud.dataproc.v1B\tJobsProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n)google/cloud/dataproc_v1/proto/jobs.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc1\x02\n\rLoggingConfig\x12W\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry\x1a\x65\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12<\n\x05value\x18\x02 \x01(\x0e\x32-.google.cloud.dataproc.v1.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xd3\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 
\x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12G\n\nproperties\x18\x07 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.HadoopJob.PropertiesEntry\x12?\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xd1\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12\x46\n\nproperties\x18\x07 \x03(\x0b\x32\x32.google.cloud.dataproc.v1.SparkJob.PropertiesEntry\x12?\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xd0\x02\n\nPySparkJob\x12\x1c\n\x14main_python_file_uri\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12H\n\nproperties\x18\x07 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.PySparkJob.PropertiesEntry\x12?\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\tQueryList\x12\x0f\n\x07queries\x18\x01 \x03(\t"\xa1\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12P\n\x10script_variables\x18\x04 
\x03(\x0b\x32\x36.google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntry\x12\x45\n\nproperties\x18\x05 \x03(\x0b\x32\x31.google.cloud.dataproc.v1.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xd1\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12T\n\x10script_variables\x18\x03 \x03(\x0b\x32:.google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 \x03(\t\x12?\n\x0elogging_config\x18\x06 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xdf\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12O\n\x10script_variables\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.PigJob.ScriptVariablesEntry\x12\x44\n\nproperties\x18\x05 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12?\n\x0elogging_config\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\t\n\x07queries":\n\x0cJobPlacement\x12\x14\n\x0c\x63luster_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xc2\x03\n\tJobStatus\x12\x38\n\x05state\x18\x01 \x01(\x0e\x32).google.cloud.dataproc.v1.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12>\n\x08substate\x18\x07 \x01(\x0e\x32,.google.cloud.dataproc.v1.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"2\n\x0cJobReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x91\x02\n\x0fYarnApplication\x12\x0c\n\x04name\x18\x01 \x01(\t\x12>\n\x05state\x18\x02 \x01(\x0e\x32/.google.cloud.dataproc.v1.YarnApplication.State\x12\x10\n\x08progress\x18\x03 \x01(\x02\x12\x14\n\x0ctracking_url\x18\x04 \x01(\t"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\x9b\x07\n\x03Job\x12\x39\n\treference\x18\x01 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobReference\x12\x39\n\tplacement\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobPlacement\x12\x39\n\nhadoop_job\x18\x03 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x05 
\x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x06 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x07 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x0c \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x33\n\x06status\x18\x08 \x01(\x0b\x32#.google.cloud.dataproc.v1.JobStatus\x12;\n\x0estatus_history\x18\r \x03(\x0b\x32#.google.cloud.dataproc.v1.JobStatus\x12\x44\n\x11yarn_applications\x18\t \x03(\x0b\x32).google.cloud.dataproc.v1.YarnApplication\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12\x39\n\x06labels\x18\x12 \x03(\x0b\x32).google.cloud.dataproc.v1.Job.LabelsEntry\x12;\n\nscheduling\x18\x14 \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"v\n\x10SubmitJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12*\n\x03job\x18\x02 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.Job\x12\x12\n\nrequest_id\x18\x04 \x01(\t"C\n\rGetJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x90\x02\n\x0fListJobsRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x06 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12T\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32\x39.google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 \x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xa3\x01\n\x10UpdateJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 
\x01(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12*\n\x03job\x18\x04 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.Job\x12/\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"X\n\x10ListJobsResponse\x12+\n\x04jobs\x18\x01 \x03(\x0b\x32\x1d.google.cloud.dataproc.v1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"F\n\x10\x43\x61ncelJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"F\n\x10\x44\x65leteJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t2\xb6\x07\n\rJobController\x12\x99\x01\n\tSubmitJob\x12*.google.cloud.dataproc.v1.SubmitJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"A\x82\xd3\xe4\x93\x02;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\x01*\x12\x92\x01\n\x06GetJob\x12\'.google.cloud.dataproc.v1.GetJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"@\x82\xd3\xe4\x93\x02:\x12\x38/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\x12\x9a\x01\n\x08ListJobs\x12).google.cloud.dataproc.v1.ListJobsRequest\x1a*.google.cloud.dataproc.v1.ListJobsResponse"7\x82\xd3\xe4\x93\x02\x31\x12//v1/projects/{project_id}/regions/{region}/jobs\x12\x9d\x01\n\tUpdateJob\x12*.google.cloud.dataproc.v1.UpdateJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"E\x82\xd3\xe4\x93\x02?28/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xa2\x01\n\tCancelJob\x12*.google.cloud.dataproc.v1.CancelJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"J\x82\xd3\xe4\x93\x02\x44"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\x12\x91\x01\n\tDeleteJob\x12*.google.cloud.dataproc.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"@\x82\xd3\xe4\x93\x02:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}Bm\n\x1c\x63om.google.cloud.dataproc.v1B\tJobsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + 
'\n)google/cloud/dataproc_v1/proto/jobs.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc1\x02\n\rLoggingConfig\x12W\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.LoggingConfig.DriverLogLevelsEntry\x1a\x65\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12<\n\x05value\x18\x02 \x01(\x0e\x32-.google.cloud.dataproc.v1.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xf1\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x33.google.cloud.dataproc.v1.HadoopJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xef\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x11\n\x04\x61rgs\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12K\n\nproperties\x18\x07 
\x03(\x0b\x32\x32.google.cloud.dataproc.v1.SparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xf8\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04\x61rgs\x18\x02 \x03(\tB\x03\xe0\x41\x01\x12\x1d\n\x10python_file_uris\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x04 \x03(\tB\x03\xe0\x41\x01\x12\x16\n\tfile_uris\x18\x05 \x03(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x61rchive_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.PySparkJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb5\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.HiveJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x31.google.cloud.dataproc.v1.HiveJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 
\x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32:.google.cloud.dataproc.v1.SparkSqlJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12N\n\nproperties\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.SparkSqlJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x38 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf8\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12\x39\n\nquery_list\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.QueryListH\x00\x12 \n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32\x35.google.cloud.dataproc.v1.PigJob.ScriptVariablesEntryB\x03\xe0\x41\x01\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.PigJob.PropertiesEntryB\x03\xe0\x41\x01\x12\x1a\n\rjar_file_uris\x18\x06 \x03(\tB\x03\xe0\x41\x01\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32\'.google.cloud.dataproc.v1.LoggingConfigB\x03\xe0\x41\x01\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"D\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x02 \x01(\tB\x03\xe0\x41\x03"\xd9\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32).google.cloud.dataproc.v1.JobStatus.StateB\x03\xe0\x41\x03\x12\x17\n\x07\x64\x65tails\x18\x02 \x01(\tB\x06\xe0\x41\x03\xe0\x41\x01\x12\x39\n\x10state_start_time\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x43\n\x08substate\x18\x07 \x01(\x0e\x32,.google.cloud.dataproc.v1.JobStatus.SubstateB\x03\xe0\x41\x03"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"<\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x01"\xa5\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x43\n\x05state\x18\x02 \x01(\x0e\x32/.google.cloud.dataproc.v1.YarnApplication.StateB\x03\xe0\x41\x02\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x02\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x01"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xcd\x07\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobReferenceB\x03\xe0\x41\x01\x12>\n\tplacement\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1.JobPlacementB\x03\xe0\x41\x02\x12\x39\n\nhadoop_job\x18\x03 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x06 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x07 \x01(\x0b\x32 
.google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x0c \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32#.google.cloud.dataproc.v1.JobStatusB\x03\xe0\x41\x03\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32).google.cloud.dataproc.v1.YarnApplicationB\x03\xe0\x41\x03\x12\'\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\tB\x03\xe0\x41\x03\x12%\n\x18\x64river_control_files_uri\x18\x0f \x01(\tB\x03\xe0\x41\x03\x12>\n\x06labels\x18\x12 \x03(\x0b\x32).google.cloud.dataproc.v1.Job.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12\x15\n\x08job_uuid\x18\x16 \x01(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job"3\n\rJobScheduling\x12"\n\x15max_failures_per_hour\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x02 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\xb3\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x19\n\x0c\x63luster_name\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32\x39.google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcherB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x07 
\x01(\tB\x03\xe0\x41\x01"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xbc\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12/\n\x03job\x18\x04 \x01(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"b\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32\x1d.google.cloud.dataproc.v1.JobB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x01"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 
\x01(\tB\x03\xe0\x41\x02\x32\x9b\t\n\rJobController\x12\xb1\x01\n\tSubmitJob\x12*.google.cloud.dataproc.v1.SubmitJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"Y\x82\xd3\xe4\x93\x02;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x15project_id,region,job\x12\xad\x01\n\x06GetJob\x12\'.google.cloud.dataproc.v1.GetJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"[\x82\xd3\xe4\x93\x02:\x12\x38/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x12\xc9\x01\n\x08ListJobs\x12).google.cloud.dataproc.v1.ListJobsRequest\x1a*.google.cloud.dataproc.v1.ListJobsResponse"f\x82\xd3\xe4\x93\x02\x31\x12//v1/projects/{project_id}/regions/{region}/jobs\xda\x41\x11project_id,region\xda\x41\x18project_id,region,filter\x12\x9d\x01\n\tUpdateJob\x12*.google.cloud.dataproc.v1.UpdateJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"E\x82\xd3\xe4\x93\x02?28/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xbd\x01\n\tCancelJob\x12*.google.cloud.dataproc.v1.CancelJobRequest\x1a\x1d.google.cloud.dataproc.v1.Job"e\x82\xd3\xe4\x93\x02\x44"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x18project_id,region,job_id\x12\xac\x01\n\tDeleteJob\x12*.google.cloud.dataproc.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x18project_id,region,job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBm\n\x1c\x63om.google.cloud.dataproc.v1B\tJobsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -80,8 +84,8 @@ 
], containing_type=None, serialized_options=None, - serialized_start=407, - serialized_end=519, + serialized_start=465, + serialized_end=577, ) _sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL) @@ -132,8 +136,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3210, - serialized_end=3379, + serialized_start=3471, + serialized_end=3640, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE) @@ -158,8 +162,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3381, - serialized_end=3453, + serialized_start=3642, + serialized_end=3714, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE) @@ -203,8 +207,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3646, - serialized_end=3781, + serialized_start=3937, + serialized_end=4072, ) _sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE) @@ -226,8 +230,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5165, - serialized_end=5219, + serialized_start=5582, + serialized_end=5636, ) _sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER) @@ -284,8 +288,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=304, - serialized_end=405, + serialized_start=362, + serialized_end=463, ) _LOGGINGCONFIG = _descriptor.Descriptor( @@ -322,8 +326,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=198, - serialized_end=519, + serialized_start=256, + serialized_end=577, ) @@ -379,8 +383,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _HADOOPJOB = _descriptor.Descriptor( @@ -441,7 +445,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -459,7 +463,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -477,7 +481,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -495,7 +499,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -513,7 +517,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -531,7 +535,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -551,8 +555,8 @@ fields=[], ) ], - serialized_start=522, - serialized_end=861, + serialized_start=580, + serialized_end=949, ) @@ -608,8 +612,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _SPARKJOB = _descriptor.Descriptor( @@ -670,7 +674,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -688,7 +692,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -706,7 +710,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -724,7 +728,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -742,7 +746,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -760,7 +764,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -780,8 +784,8 @@ fields=[], ) ], - serialized_start=864, - serialized_end=1201, + serialized_start=952, + serialized_end=1319, ) @@ -837,8 +841,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _PYSPARKJOB = _descriptor.Descriptor( @@ -863,7 +867,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -881,7 +885,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -899,7 +903,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -917,7 +921,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -935,7 +939,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -953,7 +957,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -971,7 +975,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -989,7 +993,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1001,8 +1005,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1204, - serialized_end=1540, + serialized_start=1322, + serialized_end=1698, ) @@ -1028,7 +1032,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -1040,8 +1044,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1542, - serialized_end=1570, + serialized_start=1700, + serialized_end=1733, ) @@ -1097,8 +1101,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1874, - serialized_end=1928, + serialized_start=2057, + serialized_end=2111, ) _HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1153,8 +1157,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _HIVEJOB = _descriptor.Descriptor( @@ -1215,7 +1219,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1233,7 +1237,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1251,7 +1255,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1269,7 +1273,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1289,8 +1293,8 @@ fields=[], ) ], 
- serialized_start=1573, - serialized_end=1990, + serialized_start=1736, + serialized_end=2173, ) @@ -1346,8 +1350,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1874, - serialized_end=1928, + serialized_start=2057, + serialized_end=2111, ) _SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1402,8 +1406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _SPARKSQLJOB = _descriptor.Descriptor( @@ -1464,7 +1468,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1482,7 +1486,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1500,7 +1504,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1518,7 +1522,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1538,8 +1542,8 @@ fields=[], ) ], - serialized_start=1993, - serialized_end=2458, + serialized_start=2176, + serialized_end=2661, ) @@ -1595,8 +1599,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1874, - serialized_end=1928, + serialized_start=2057, + serialized_end=2111, ) _PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1651,8 +1655,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=802, - serialized_end=851, + serialized_start=890, + serialized_end=939, ) _PIGJOB = _descriptor.Descriptor( @@ -1713,7 +1717,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1731,7 +1735,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1749,7 +1753,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1767,7 +1771,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1785,7 +1789,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1805,8 +1809,8 @@ fields=[], ) ], - serialized_start=2461, - serialized_end=2940, + serialized_start=2664, + serialized_end=3168, ) @@ -1832,7 +1836,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1850,7 +1854,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1862,8 +1866,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2942, - serialized_end=3000, + serialized_start=3170, + serialized_end=3238, ) @@ -1889,7 +1893,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1907,7 +1911,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1925,7 +1929,7 @@ containing_type=None, 
is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1943,7 +1947,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1955,8 +1959,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3003, - serialized_end=3453, + serialized_start=3241, + serialized_end=3714, ) @@ -1982,7 +1986,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2000,7 +2004,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2012,8 +2016,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3455, - serialized_end=3505, + serialized_start=3716, + serialized_end=3776, ) @@ -2039,7 +2043,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2057,7 +2061,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2075,7 +2079,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2093,7 +2097,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2105,8 +2109,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3508, - serialized_end=3781, + serialized_start=3779, + serialized_end=4072, ) @@ -2162,8 
+2166,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4650, - serialized_end=4695, + serialized_start=4991, + serialized_end=5036, ) _JOB = _descriptor.Descriptor( @@ -2188,7 +2192,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2206,7 +2210,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2332,7 +2336,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2350,7 +2354,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2368,7 +2372,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2386,7 +2390,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2404,7 +2408,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2422,7 +2426,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2440,7 +2444,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2458,7 
+2462,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2478,8 +2482,8 @@ fields=[], ) ], - serialized_start=3784, - serialized_end=4707, + serialized_start=4075, + serialized_end=5048, ) @@ -2505,7 +2509,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -2517,8 +2521,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4709, - serialized_end=4755, + serialized_start=5050, + serialized_end=5101, ) @@ -2544,7 +2548,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2562,7 +2566,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2580,7 +2584,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2598,7 +2602,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2610,8 +2614,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4757, - serialized_end=4875, + serialized_start=5104, + serialized_end=5242, ) @@ -2637,7 +2641,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2655,7 +2659,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2673,7 
+2677,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2685,8 +2689,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4877, - serialized_end=4944, + serialized_start=5244, + serialized_end=5326, ) @@ -2712,7 +2716,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2730,7 +2734,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2748,7 +2752,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2766,7 +2770,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2784,7 +2788,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2802,7 +2806,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2820,7 +2824,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2832,8 +2836,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4947, - serialized_end=5219, + serialized_start=5329, + serialized_end=5636, ) @@ -2859,7 +2863,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2877,7 +2881,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2895,7 +2899,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2913,7 +2917,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2931,7 +2935,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2943,8 +2947,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5222, - serialized_end=5385, + serialized_start=5639, + serialized_end=5827, ) @@ -2970,7 +2974,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2988,7 +2992,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -3000,8 +3004,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5387, - serialized_end=5475, + serialized_start=5829, + serialized_end=5927, ) @@ -3027,7 +3031,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3045,7 +3049,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3063,7 +3067,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3075,8 +3079,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5477, - serialized_end=5547, + serialized_start=5929, + serialized_end=6014, ) @@ -3102,7 +3106,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3120,7 +3124,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3138,7 +3142,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3150,8 +3154,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5549, - serialized_end=5619, + serialized_start=6016, + serialized_end=6101, ) _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[ @@ -3771,7 +3775,7 @@ state: Output only. A state message specifying the overall job state. details: - Output only. Optional job state details, such as an error + Optional. Output only. Job state details, such as an error description if the state is ERROR. state_start_time: Output only. The time when this state was entered. 
@@ -4156,24 +4160,110 @@ DESCRIPTOR._options = None _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY._options = None _HADOOPJOB_PROPERTIESENTRY._options = None +_HADOOPJOB.fields_by_name["args"]._options = None +_HADOOPJOB.fields_by_name["jar_file_uris"]._options = None +_HADOOPJOB.fields_by_name["file_uris"]._options = None +_HADOOPJOB.fields_by_name["archive_uris"]._options = None +_HADOOPJOB.fields_by_name["properties"]._options = None +_HADOOPJOB.fields_by_name["logging_config"]._options = None _SPARKJOB_PROPERTIESENTRY._options = None +_SPARKJOB.fields_by_name["args"]._options = None +_SPARKJOB.fields_by_name["jar_file_uris"]._options = None +_SPARKJOB.fields_by_name["file_uris"]._options = None +_SPARKJOB.fields_by_name["archive_uris"]._options = None +_SPARKJOB.fields_by_name["properties"]._options = None +_SPARKJOB.fields_by_name["logging_config"]._options = None _PYSPARKJOB_PROPERTIESENTRY._options = None +_PYSPARKJOB.fields_by_name["main_python_file_uri"]._options = None +_PYSPARKJOB.fields_by_name["args"]._options = None +_PYSPARKJOB.fields_by_name["python_file_uris"]._options = None +_PYSPARKJOB.fields_by_name["jar_file_uris"]._options = None +_PYSPARKJOB.fields_by_name["file_uris"]._options = None +_PYSPARKJOB.fields_by_name["archive_uris"]._options = None +_PYSPARKJOB.fields_by_name["properties"]._options = None +_PYSPARKJOB.fields_by_name["logging_config"]._options = None +_QUERYLIST.fields_by_name["queries"]._options = None _HIVEJOB_SCRIPTVARIABLESENTRY._options = None _HIVEJOB_PROPERTIESENTRY._options = None +_HIVEJOB.fields_by_name["continue_on_failure"]._options = None +_HIVEJOB.fields_by_name["script_variables"]._options = None +_HIVEJOB.fields_by_name["properties"]._options = None +_HIVEJOB.fields_by_name["jar_file_uris"]._options = None _SPARKSQLJOB_SCRIPTVARIABLESENTRY._options = None _SPARKSQLJOB_PROPERTIESENTRY._options = None +_SPARKSQLJOB.fields_by_name["script_variables"]._options = None +_SPARKSQLJOB.fields_by_name["properties"]._options = None 
+_SPARKSQLJOB.fields_by_name["jar_file_uris"]._options = None +_SPARKSQLJOB.fields_by_name["logging_config"]._options = None _PIGJOB_SCRIPTVARIABLESENTRY._options = None _PIGJOB_PROPERTIESENTRY._options = None +_PIGJOB.fields_by_name["continue_on_failure"]._options = None +_PIGJOB.fields_by_name["script_variables"]._options = None +_PIGJOB.fields_by_name["properties"]._options = None +_PIGJOB.fields_by_name["jar_file_uris"]._options = None +_PIGJOB.fields_by_name["logging_config"]._options = None +_JOBPLACEMENT.fields_by_name["cluster_name"]._options = None +_JOBPLACEMENT.fields_by_name["cluster_uuid"]._options = None +_JOBSTATUS.fields_by_name["state"]._options = None +_JOBSTATUS.fields_by_name["details"]._options = None +_JOBSTATUS.fields_by_name["state_start_time"]._options = None +_JOBSTATUS.fields_by_name["substate"]._options = None +_JOBREFERENCE.fields_by_name["project_id"]._options = None +_JOBREFERENCE.fields_by_name["job_id"]._options = None +_YARNAPPLICATION.fields_by_name["name"]._options = None +_YARNAPPLICATION.fields_by_name["state"]._options = None +_YARNAPPLICATION.fields_by_name["progress"]._options = None +_YARNAPPLICATION.fields_by_name["tracking_url"]._options = None _JOB_LABELSENTRY._options = None +_JOB.fields_by_name["reference"]._options = None +_JOB.fields_by_name["placement"]._options = None +_JOB.fields_by_name["status"]._options = None +_JOB.fields_by_name["status_history"]._options = None +_JOB.fields_by_name["yarn_applications"]._options = None +_JOB.fields_by_name["driver_output_resource_uri"]._options = None +_JOB.fields_by_name["driver_control_files_uri"]._options = None +_JOB.fields_by_name["labels"]._options = None +_JOB.fields_by_name["scheduling"]._options = None +_JOB.fields_by_name["job_uuid"]._options = None +_JOBSCHEDULING.fields_by_name["max_failures_per_hour"]._options = None +_SUBMITJOBREQUEST.fields_by_name["project_id"]._options = None +_SUBMITJOBREQUEST.fields_by_name["region"]._options = None 
+_SUBMITJOBREQUEST.fields_by_name["job"]._options = None +_SUBMITJOBREQUEST.fields_by_name["request_id"]._options = None +_GETJOBREQUEST.fields_by_name["project_id"]._options = None +_GETJOBREQUEST.fields_by_name["region"]._options = None +_GETJOBREQUEST.fields_by_name["job_id"]._options = None +_LISTJOBSREQUEST.fields_by_name["project_id"]._options = None +_LISTJOBSREQUEST.fields_by_name["region"]._options = None +_LISTJOBSREQUEST.fields_by_name["page_size"]._options = None +_LISTJOBSREQUEST.fields_by_name["page_token"]._options = None +_LISTJOBSREQUEST.fields_by_name["cluster_name"]._options = None +_LISTJOBSREQUEST.fields_by_name["job_state_matcher"]._options = None +_LISTJOBSREQUEST.fields_by_name["filter"]._options = None +_UPDATEJOBREQUEST.fields_by_name["project_id"]._options = None +_UPDATEJOBREQUEST.fields_by_name["region"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job_id"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None +_LISTJOBSRESPONSE.fields_by_name["jobs"]._options = None +_LISTJOBSRESPONSE.fields_by_name["next_page_token"]._options = None +_CANCELJOBREQUEST.fields_by_name["project_id"]._options = None +_CANCELJOBREQUEST.fields_by_name["region"]._options = None +_CANCELJOBREQUEST.fields_by_name["job_id"]._options = None +_DELETEJOBREQUEST.fields_by_name["project_id"]._options = None +_DELETEJOBREQUEST.fields_by_name["region"]._options = None +_DELETEJOBREQUEST.fields_by_name["job_id"]._options = None _JOBCONTROLLER = _descriptor.ServiceDescriptor( name="JobController", full_name="google.cloud.dataproc.v1.JobController", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=5622, - serialized_end=6572, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=6104, + serialized_end=7283, methods=[ _descriptor.MethodDescriptor( name="SubmitJob", @@ -4183,7 
+4273,7 @@ input_type=_SUBMITJOBREQUEST, output_type=_JOB, serialized_options=_b( - '\202\323\344\223\002;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\001*' + '\202\323\344\223\002;"6/v1/projects/{project_id}/regions/{region}/jobs:submit:\001*\332A\025project_id,region,job' ), ), _descriptor.MethodDescriptor( @@ -4194,7 +4284,7 @@ input_type=_GETJOBREQUEST, output_type=_JOB, serialized_options=_b( - "\202\323\344\223\002:\0228/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + "\202\323\344\223\002:\0228/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\030project_id,region,job_id" ), ), _descriptor.MethodDescriptor( @@ -4205,7 +4295,7 @@ input_type=_LISTJOBSREQUEST, output_type=_LISTJOBSRESPONSE, serialized_options=_b( - "\202\323\344\223\0021\022//v1/projects/{project_id}/regions/{region}/jobs" + "\202\323\344\223\0021\022//v1/projects/{project_id}/regions/{region}/jobs\332A\021project_id,region\332A\030project_id,region,filter" ), ), _descriptor.MethodDescriptor( @@ -4227,7 +4317,7 @@ input_type=_CANCELJOBREQUEST, output_type=_JOB, serialized_options=_b( - '\202\323\344\223\002D"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*' + '\202\323\344\223\002D"?/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*\332A\030project_id,region,job_id' ), ), _descriptor.MethodDescriptor( @@ -4238,7 +4328,7 @@ input_type=_DELETEJOBREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + "\202\323\344\223\002:*8/v1/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\030project_id,region,job_id" ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1/proto/operations.proto b/dataproc/google/cloud/dataproc_v1/proto/operations.proto index c820cd8e65dc..4af2a5f80795 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/operations.proto +++ 
b/dataproc/google/cloud/dataproc_v1/proto/operations.proto @@ -17,8 +17,9 @@ syntax = "proto3"; package google.cloud.dataproc.v1; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; option java_multiple_files = true; @@ -43,41 +44,41 @@ message ClusterOperationStatus { } // Output only. A message containing the operation state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A message containing the detailed operation state. - string inner_state = 2; + string inner_state = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A message containing any operation metadata details. - string details = 3; + string details = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time this state was entered. - google.protobuf.Timestamp state_start_time = 4; + google.protobuf.Timestamp state_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Metadata describing the operation. message ClusterOperationMetadata { // Output only. Name of the cluster for the operation. - string cluster_name = 7; + string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Cluster UUID for the operation. - string cluster_uuid = 8; + string cluster_uuid = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Current operation status. - ClusterOperationStatus status = 9; + ClusterOperationStatus status = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous operation status. - repeated ClusterOperationStatus status_history = 10; + repeated ClusterOperationStatus status_history = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The operation type. 
- string operation_type = 11; + string operation_type = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Short description of operation. - string description = 12; + string description = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Labels associated with the operation - map labels = 13; + map labels = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Errors encountered during operation execution. - repeated string warnings = 14; + repeated string warnings = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py index 0f09da0c701d..f7fadd195d52 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py +++ b/dataproc/google/cloud/dataproc_v1/proto/operations_pb2.py @@ -15,8 +15,9 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -27,11 +28,12 @@ "\n\034com.google.cloud.dataproc.v1B\017OperationsProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n/google/cloud/dataproc_v1/proto/operations.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf5\x01\n\x16\x43lusterOperationStatus\x12\x45\n\x05state\x18\x01 \x01(\x0e\x32\x36.google.cloud.dataproc.v1.ClusterOperationStatus.State\x12\x13\n\x0binner_state\x18\x02 \x01(\t\x12\x0f\n\x07\x64\x65tails\x18\x03 \x01(\t\x12\x34\n\x10state_start_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\x90\x03\n\x18\x43lusterOperationMetadata\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x08 \x01(\t\x12@\n\x06status\x18\t \x01(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatus\x12H\n\x0estatus_history\x18\n \x03(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatus\x12\x16\n\x0eoperation_type\x18\x0b \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x0c \x01(\t\x12N\n\x06labels\x18\r \x03(\x0b\x32>.google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntry\x12\x10\n\x08warnings\x18\x0e \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42s\n\x1c\x63om.google.cloud.dataproc.v1B\x0fOperationsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' + '\n/google/cloud/dataproc_v1/proto/operations.proto\x12\x18google.cloud.dataproc.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\x89\x02\n\x16\x43lusterOperationStatus\x12J\n\x05state\x18\x01 \x01(\x0e\x32\x36.google.cloud.dataproc.v1.ClusterOperationStatus.StateB\x03\xe0\x41\x03\x12\x18\n\x0binner_state\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07\x64\x65tails\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\xb8\x03\n\x18\x43lusterOperationMetadata\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x08 \x01(\tB\x03\xe0\x41\x03\x12\x45\n\x06status\x18\t \x01(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatusB\x03\xe0\x41\x03\x12M\n\x0estatus_history\x18\n 
\x03(\x0b\x32\x30.google.cloud.dataproc.v1.ClusterOperationStatusB\x03\xe0\x41\x03\x12\x1b\n\x0eoperation_type\x18\x0b \x01(\tB\x03\xe0\x41\x03\x12\x18\n\x0b\x64\x65scription\x18\x0c \x01(\tB\x03\xe0\x41\x03\x12S\n\x06labels\x18\r \x03(\x0b\x32>.google.cloud.dataproc.v1.ClusterOperationMetadata.LabelsEntryB\x03\xe0\x41\x03\x12\x15\n\x08warnings\x18\x0e \x03(\tB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42s\n\x1c\x63om.google.cloud.dataproc.v1B\x0fOperationsProtoP\x01Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataprocb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -57,8 +59,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=330, - serialized_end=386, + serialized_start=383, + serialized_end=439, ) _sym_db.RegisterEnumDescriptor(_CLUSTEROPERATIONSTATUS_STATE) @@ -85,7 +87,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -103,7 +105,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -121,7 +123,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -139,7 +141,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -151,8 +153,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=141, - serialized_end=386, + serialized_start=174, + 
serialized_end=439, ) @@ -208,8 +210,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=744, - serialized_end=789, + serialized_start=837, + serialized_end=882, ) _CLUSTEROPERATIONMETADATA = _descriptor.Descriptor( @@ -234,7 +236,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -252,7 +254,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -270,7 +272,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -288,7 +290,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -306,7 +308,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -324,7 +326,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -342,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -360,7 +362,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -372,8 +374,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=389, - serialized_end=789, + serialized_start=442, + serialized_end=882, ) _CLUSTEROPERATIONSTATUS.fields_by_name[ 
@@ -467,5 +469,17 @@ DESCRIPTOR._options = None +_CLUSTEROPERATIONSTATUS.fields_by_name["state"]._options = None +_CLUSTEROPERATIONSTATUS.fields_by_name["inner_state"]._options = None +_CLUSTEROPERATIONSTATUS.fields_by_name["details"]._options = None +_CLUSTEROPERATIONSTATUS.fields_by_name["state_start_time"]._options = None _CLUSTEROPERATIONMETADATA_LABELSENTRY._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["cluster_name"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["cluster_uuid"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["status"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["status_history"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["operation_type"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["description"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["labels"]._options = None +_CLUSTEROPERATIONMETADATA.fields_by_name["warnings"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto index 61295a5500dc..8976c42e29a0 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto +++ b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates.proto @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.dataproc.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/dataproc/v1/clusters.proto"; import "google/cloud/dataproc/v1/jobs.proto"; import "google/longrunning/operations.proto"; @@ -32,6 +35,9 @@ option java_package = "com.google.cloud.dataproc.v1"; // The API interface for managing Workflow Templates in the // Cloud Dataproc API. 
service WorkflowTemplateService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Creates new workflow template. rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { option (google.api.http) = { @@ -42,6 +48,7 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "parent,template"; } // Retrieves the latest workflow template. @@ -55,6 +62,7 @@ service WorkflowTemplateService { get: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" } }; + option (google.api.method_signature) = "name"; } // Instantiates a template and begins execution. @@ -70,7 +78,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // Also see [Using + // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -84,6 +94,12 @@ service WorkflowTemplateService { body: "*" } }; + option (google.api.method_signature) = "name"; + option (google.api.method_signature) = "name,parameters"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; } // Instantiates a template and begins execution. @@ -103,7 +119,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata][google.cloud.dataproc.v1.WorkflowMetadata]. + // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). 
+ // Also see [Using + // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -117,6 +135,11 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "parent,template"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; } // Updates (replaces) workflow template. The updated template @@ -130,6 +153,7 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "template"; } // Lists workflows that match the specified filter in the request. @@ -140,6 +164,7 @@ service WorkflowTemplateService { get: "/v1/{parent=projects/*/regions/*}/workflowTemplates" } }; + option (google.api.method_signature) = "parent"; } // Deletes a workflow template. It does not cancel in-progress workflows. @@ -150,22 +175,32 @@ service WorkflowTemplateService { delete: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" } }; + option (google.api.method_signature) = "name"; } } // A Cloud Dataproc workflow template resource. message WorkflowTemplate { - // Required. The template id. - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). Cannot begin or end with underscore - // or hyphen. Must consist of between 3 and 50 characters. - string id = 2; + option (google.api.resource) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + pattern: "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}" + pattern: "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}" + history: ORIGINALLY_SINGLE_PATTERN + }; + + string id = 2 [(google.api.field_behavior) = REQUIRED]; - // Output only. 
The "resource name" of the template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Output only. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. Used to perform a consistent read-modify-write. // @@ -176,13 +211,13 @@ message WorkflowTemplate { // the current template with the `version` field filled in with the // current server version. The user updates other fields in the template, // then returns it as part of the `UpdateWorkflowTemplate` request. - int32 version = 3; + int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; // Output only. The time template was created. - google.protobuf.Timestamp create_time = 4; + google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time template was last updated. - google.protobuf.Timestamp update_time = 5; + google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The labels to associate with this template. These labels // will be propagated to all jobs and clusters created by the workflow @@ -196,18 +231,18 @@ message WorkflowTemplate { // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). // // No more than 32 labels can be associated with a template. - map labels = 6; + map labels = 6 [(google.api.field_behavior) = OPTIONAL]; // Required. 
WorkflowTemplate scheduling information. - WorkflowTemplatePlacement placement = 7; + WorkflowTemplatePlacement placement = 7 [(google.api.field_behavior) = REQUIRED]; // Required. The Directed Acyclic Graph of Jobs to submit. - repeated OrderedJob jobs = 8; + repeated OrderedJob jobs = 8 [(google.api.field_behavior) = REQUIRED]; - // Optional. Template parameters whose values are substituted into the + // Optional. Template parameters whose values are substituted into the // template. Values for parameters must be provided when the template is // instantiated. - repeated TemplateParameter parameters = 9; + repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; } // Specifies workflow execution target. @@ -217,7 +252,7 @@ message WorkflowTemplatePlacement { // Required. Specifies where workflow executes; either on a managed // cluster or an existing cluster chosen by labels. oneof placement { - // Optional. A cluster that is managed by the workflow. + // A cluster that is managed by the workflow. ManagedCluster managed_cluster = 1; // Optional. A selector that chooses target cluster for jobs based @@ -236,10 +271,10 @@ message ManagedCluster { // The name must contain only lower-case letters (a-z), numbers (0-9), // and hyphens (-). Must begin with a letter. Cannot begin or end with // hyphen. Must consist of between 2 and 35 characters. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster configuration. - ClusterConfig config = 3; + ClusterConfig config = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The labels to associate with this cluster. // @@ -251,7 +286,7 @@ message ManagedCluster { // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} // // No more than 32 labels can be associated with a given cluster.
- map<string, string> labels = 4; + map<string, string> labels = 4 [(google.api.field_behavior) = OPTIONAL]; } // A selector that chooses target cluster for jobs based on metadata. @@ -261,11 +296,11 @@ message ClusterSelector { // // If unspecified, the zone of the first cluster matching the selector // is used. - string zone = 1; + string zone = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. The cluster labels. Cluster must have all labels // to match. - map<string, string> cluster_labels = 2; + map<string, string> cluster_labels = 2 [(google.api.field_behavior) = REQUIRED]; } // A job executed by the workflow. @@ -281,7 +316,7 @@ message OrderedJob { // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). Cannot begin or end with underscore // or hyphen. Must consist of between 3 and 50 characters. - string step_id = 1; + string step_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The job definition. oneof job_type { @@ -314,14 +349,14 @@ // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} // // No more than 32 labels can be associated with a given job. - map<string, string> labels = 8; + map<string, string> labels = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. Job scheduling configuration. - JobScheduling scheduling = 9; + JobScheduling scheduling = 9 [(google.api.field_behavior) = OPTIONAL]; // Optional. The optional list of prerequisite job step_ids. // If not specified, the job will start at the beginning of workflow. - repeated string prerequisite_step_ids = 10; + repeated string prerequisite_step_ids = 10 [(google.api.field_behavior) = OPTIONAL]; } // A configurable parameter that replaces one or more fields in the template. @@ -334,14 +369,14 @@ message OrderedJob { // - Main class (in HadoopJob and SparkJob) // - Zone (in ClusterSelector) message TemplateParameter { - // Required. Parameter name. + // Required. Parameter name. 
// The parameter name is used as the key, and paired with the // parameter value, which are passed to the template when the template // is instantiated. // The name must contain only capital letters (A-Z), numbers (0-9), and // underscores (_), and must not start with a number. The maximum length is // 40 characters. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Paths to all fields that the parameter replaces. // A field is allowed to appear in at most one parameter's list of field @@ -387,14 +422,14 @@ message TemplateParameter { // // - placement.clusterSelector.clusterLabels // - jobs['step-id'].sparkJob.args - repeated string fields = 2; + repeated string fields = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. Brief description of the parameter. // Must not exceed 1024 characters. - string description = 3; + string description = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Validation rules to be applied to this parameter's value. - ParameterValidation validation = 4; + ParameterValidation validation = 4 [(google.api.field_behavior) = OPTIONAL]; } // Configuration for parameter validation. @@ -414,13 +449,13 @@ message RegexValidation { // Required. RE2 regular expressions used to validate the parameter's value. // The value must match the regex in its entirety (substring // matches are not sufficient). - repeated string regexes = 1; + repeated string regexes = 1 [(google.api.field_behavior) = REQUIRED]; } // Validation based on a list of allowed values. message ValueValidation { // Required. List of allowed values for the parameter. - repeated string values = 1; + repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; } // A Cloud Dataproc workflow template resource. @@ -440,57 +475,66 @@ message WorkflowMetadata { DONE = 3; } - // Output only. The "resource name" of the template. - string template = 1; + // Output only. 
The resource name of the workflow template as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string template = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The version of template at the time of // workflow instantiation. - int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The create cluster operation metadata. - ClusterOperation create_cluster = 3; + ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow graph. - WorkflowGraph graph = 4; + WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The delete cluster operation metadata. - ClusterOperation delete_cluster = 5; + ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow state. - State state = 6; + State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the target cluster. - string cluster_name = 7; + string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Map from parameter names to values that were used for those parameters. map parameters = 8; // Output only. Workflow start time. - google.protobuf.Timestamp start_time = 9; + google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Workflow end time. - google.protobuf.Timestamp end_time = 10; + google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The UUID of target cluster. 
- string cluster_uuid = 11; + string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The cluster operation triggered by a workflow. message ClusterOperation { // Output only. The id of the cluster operation. - string operation_id = 1; + string operation_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Error, if operation failed. - string error = 2; + string error = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Indicates the operation is done. - bool done = 3; + bool done = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The workflow graph. message WorkflowGraph { // Output only. The workflow nodes. - repeated WorkflowNode nodes = 1; + repeated WorkflowNode nodes = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The workflow node. @@ -518,52 +562,88 @@ message WorkflowNode { } // Output only. The name of the node. - string step_id = 1; + string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Node's prerequisite nodes. - repeated string prerequisite_step_ids = 2; + repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The job id; populated after the node enters RUNNING state. - string job_id = 3; + string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The node state. - NodeState state = 5; + NodeState state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The error detail. - string error = 6; + string error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to create a workflow template. message CreateWorkflowTemplateRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.workflowTemplates,create`, the resource name of the + // region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.create`, the resource name of + // the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Required. The Dataproc workflow template to create. - WorkflowTemplate template = 2; + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to fetch a workflow template. message GetWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to retrieve. Only previously - // instatiated versions can be retrieved. + // instantiated versions can be retrieved. // // If unspecified, retrieves the current version. 
- int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; } // A request to instantiate a workflow template. message InstantiateWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to instantiate. If specified, // the workflow will be instantiated only if the current version of @@ -571,7 +651,7 @@ message InstantiateWorkflowTemplateRequest { // // This option cannot be used to instantiate a previous version of // workflow template. - int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. A tag that prevents multiple concurrent workflow // instances with the same tag from running. This mitigates risk of @@ -582,22 +662,34 @@ message InstantiateWorkflowTemplateRequest { // // The tag must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5; + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
Map from parameter names to values that should be used for those // parameters. Values may not exceed 100 characters. - map parameters = 6; + map parameters = 6 [(google.api.field_behavior) = OPTIONAL]; } // A request to instantiate an inline workflow template. message InstantiateInlineWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,instantiateinline`, the resource + // name of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.instantiateinline`, the + // resource name of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Required. The workflow template to instantiate. - WorkflowTemplate template = 2; + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A tag that prevents multiple concurrent workflow // instances with the same tag from running. This mitigates risk of @@ -608,7 +700,7 @@ message InstantiateInlineWorkflowTemplateRequest { // // The tag must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 3; + string request_id = 3 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a workflow template. @@ -616,46 +708,75 @@ message UpdateWorkflowTemplateRequest { // Required. The updated workflow template. 
// // The `template.version` field must match the current version. - WorkflowTemplate template = 1; + WorkflowTemplate template = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; } // A request to list workflow templates in a project. message ListWorkflowTemplatesRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,list`, the resource + // name of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.list`, the + // resource name of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The maximum number of results to return in each response. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The page token, returned by a previous call, to request the // next page of results. - string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // A response to a request to list workflow templates in a project. message ListWorkflowTemplatesResponse { // Output only. WorkflowTemplates list. - repeated WorkflowTemplate templates = 1; + repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. 
To fetch additional results, provide this value as the // page_token in a subsequent ListWorkflowTemplatesRequest. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to delete a workflow template. // // Currently started workflows will remain running. message DeleteWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.delete`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to delete. If specified, // will only delete the template if the current server version matches // specified version. 
- int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py index a8f2903a1152..e539c2c176c2 100644 --- a/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py +++ b/dataproc/google/cloud/dataproc_v1/proto/workflow_templates_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.dataproc_v1.proto import ( clusters_pb2 as google_dot_cloud_dot_dataproc__v1_dot_proto_dot_clusters__pb2, ) @@ -37,10 +40,13 @@ "\n\034com.google.cloud.dataproc.v1B\026WorkflowTemplatesProtoP\001Z@google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc" ), serialized_pb=_b( - '\n7google/cloud/dataproc_v1/proto/workflow_templates.proto\x12\x18google.cloud.dataproc.v1\x1a\x1cgoogle/api/annotations.proto\x1a-google/cloud/dataproc_v1/proto/clusters.proto\x1a)google/cloud/dataproc_v1/proto/jobs.proto\x1a#google/longrunning/operations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xd3\x03\n\x10WorkflowTemplate\x12\n\n\x02id\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12/\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x46\n\x06labels\x18\x06 \x03(\x0b\x32\x36.google.cloud.dataproc.v1.WorkflowTemplate.LabelsEntry\x12\x46\n\tplacement\x18\x07 \x01(\x0b\x32\x33.google.cloud.dataproc.v1.WorkflowTemplatePlacement\x12\x32\n\x04jobs\x18\x08 \x03(\x0b\x32$.google.cloud.dataproc.v1.OrderedJob\x12?\n\nparameters\x18\t 
\x03(\x0b\x32+.google.cloud.dataproc.v1.TemplateParameter\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb4\x01\n\x19WorkflowTemplatePlacement\x12\x43\n\x0fmanaged_cluster\x18\x01 \x01(\x0b\x32(.google.cloud.dataproc.v1.ManagedClusterH\x00\x12\x45\n\x10\x63luster_selector\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ClusterSelectorH\x00\x42\x0b\n\tplacement"\xd4\x01\n\x0eManagedCluster\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1.ClusterConfig\x12\x44\n\x06labels\x18\x04 \x03(\x0b\x32\x34.google.cloud.dataproc.v1.ManagedCluster.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xab\x01\n\x0f\x43lusterSelector\x12\x0c\n\x04zone\x18\x01 \x01(\t\x12T\n\x0e\x63luster_labels\x18\x02 \x03(\x0b\x32<.google.cloud.dataproc.v1.ClusterSelector.ClusterLabelsEntry\x1a\x34\n\x12\x43lusterLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd3\x04\n\nOrderedJob\x12\x0f\n\x07step_id\x18\x01 \x01(\t\x12\x39\n\nhadoop_job\x18\x02 \x01(\x0b\x32#.google.cloud.dataproc.v1.HadoopJobH\x00\x12\x37\n\tspark_job\x18\x03 \x01(\x0b\x32".google.cloud.dataproc.v1.SparkJobH\x00\x12;\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32$.google.cloud.dataproc.v1.PySparkJobH\x00\x12\x35\n\x08hive_job\x18\x05 \x01(\x0b\x32!.google.cloud.dataproc.v1.HiveJobH\x00\x12\x33\n\x07pig_job\x18\x06 \x01(\x0b\x32 .google.cloud.dataproc.v1.PigJobH\x00\x12>\n\rspark_sql_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12@\n\x06labels\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.OrderedJob.LabelsEntry\x12;\n\nscheduling\x18\t \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x89\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x41\n\nvalidation\x18\x04 \x01(\x0b\x32-.google.cloud.dataproc.v1.ParameterValidation"\xa1\x01\n\x13ParameterValidation\x12:\n\x05regex\x18\x01 \x01(\x0b\x32).google.cloud.dataproc.v1.RegexValidationH\x00\x12;\n\x06values\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\xfd\x04\n\x10WorkflowMetadata\x12\x10\n\x08template\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x42\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperation\x12\x36\n\x05graph\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.WorkflowGraph\x12\x42\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperation\x12?\n\x05state\x18\x06 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowMetadata.State\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12N\n\nparameters\x18\x08 \x03(\x0b\x32:.google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry\x12.\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x63luster_uuid\x18\x0b \x01(\t\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"E\n\x10\x43lusterOperation\x12\x14\n\x0coperation_id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08"F\n\rWorkflowGraph\x12\x35\n\x05nodes\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1.WorkflowNode"\x8a\x02\n\x0cWorkflowNode\x12\x0f\n\x07step_id\x18\x01 
\x01(\t\x12\x1d\n\x15prerequisite_step_ids\x18\x02 \x03(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12?\n\x05state\x18\x05 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowNode.NodeState\x12\r\n\x05\x65rror\x18\x06 \x01(\t"j\n\tNodeState\x12\x1a\n\x16NODE_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"m\n\x1d\x43reateWorkflowTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplate";\n\x1aGetWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05"\xec\x01\n"InstantiateWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12`\n\nparameters\x18\x06 \x03(\x0b\x32L.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x8c\x01\n(InstantiateInlineWorkflowTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplate\x12\x12\n\nrequest_id\x18\x03 \x01(\t"]\n\x1dUpdateWorkflowTemplateRequest\x12<\n\x08template\x18\x01 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplate"U\n\x1cListWorkflowTemplatesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"w\n\x1dListWorkflowTemplatesResponse\x12=\n\ttemplates\x18\x01 \x03(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplate\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t">\n\x1d\x44\x65leteWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 
\x01(\x05\x32\xe2\x0e\n\x17WorkflowTemplateService\x12\x89\x02\n\x16\x43reateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x89\x01\x82\xd3\xe4\x93\x02\x82\x01"5/v1/{parent=projects/*/locations/*}/workflowTemplates:\x08templateZ?"3/v1/{parent=projects/*/regions/*}/workflowTemplates:\x08template\x12\xed\x01\n\x13GetWorkflowTemplate\x12\x34.google.cloud.dataproc.v1.GetWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"t\x82\xd3\xe4\x93\x02n\x12\x35/v1/{name=projects/*/locations/*/workflowTemplates/*}Z5\x12\x33/v1/{name=projects/*/regions/*/workflowTemplates/*}\x12\x90\x02\n\x1bInstantiateWorkflowTemplate\x12<.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01"A/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*ZD"?/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*\x12\xb6\x02\n!InstantiateInlineWorkflowTemplate\x12\x42.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xad\x01\x82\xd3\xe4\x93\x02\xa6\x01"G/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZQ"E/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\x12\x9b\x02\n\x16UpdateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x9b\x01\x82\xd3\xe4\x93\x02\x94\x01\x1a>/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08templateZH\x1a\n\rspark_sql_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1.OrderedJob.LabelsEntryB\x03\xe0\x41\x01\x12@\n\nscheduling\x18\t \x01(\x0b\x32\'.google.cloud.dataproc.v1.JobSchedulingB\x03\xe0\x41\x01\x12"\n\x15prerequisite_step_ids\x18\n 
\x03(\tB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x9d\x01\n\x11TemplateParameter\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ields\x18\x02 \x03(\tB\x03\xe0\x41\x02\x12\x18\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32-.google.cloud.dataproc.v1.ParameterValidationB\x03\xe0\x41\x01"\xa1\x01\n\x13ParameterValidation\x12:\n\x05regex\x18\x01 \x01(\x0b\x32).google.cloud.dataproc.v1.RegexValidationH\x00\x12;\n\x06values\x18\x02 \x01(\x0b\x32).google.cloud.dataproc.v1.ValueValidationH\x00\x42\x11\n\x0fvalidation_type"\'\n\x0fRegexValidation\x12\x14\n\x07regexes\x18\x01 \x03(\tB\x03\xe0\x41\x02"&\n\x0fValueValidation\x12\x13\n\x06values\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xaf\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12G\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12;\n\x05graph\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1.WorkflowGraphB\x03\xe0\x41\x03\x12G\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32*.google.cloud.dataproc.v1.ClusterOperationB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x06 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12N\n\nparameters\x18\x08 \x03(\x0b\x32:.google.cloud.dataproc.v1.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"K\n\rWorkflowGraph\x12:\n\x05nodes\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1.WorkflowNodeB\x03\xe0\x41\x03"\xa3\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x44\n\x05state\x18\x05 \x01(\x0e\x32\x30.google.cloud.dataproc.v1.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"j\n\tNodeState\x12\x1a\n\x16NODE_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa4\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02"r\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01"\xad\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x65\n\nparameters\x18\x06 \x03(\x0b\x32L.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest.ParametersEntryB\x03\xe0\x41\x01\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\xc8\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x8f\x01\n\x1dUpdateWorkflowTemplateRequest\x12n\n\x08template\x18\x01 \x01(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate"\x91\x01\n\x1cListWorkflowTemplatesRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x81\x01\n\x1dListWorkflowTemplatesResponse\x12\x42\n\ttemplates\x18\x01 \x03(\x0b\x32*.google.cloud.dataproc.v1.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"u\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x14\n\x07version\x18\x02 
\x01(\x05\x42\x03\xe0\x41\x01\x32\xe6\x10\n\x17WorkflowTemplateService\x12\x9b\x02\n\x16\x43reateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\x9b\x01\x82\xd3\xe4\x93\x02\x82\x01"5/v1/{parent=projects/*/locations/*}/workflowTemplates:\x08templateZ?"3/v1/{parent=projects/*/regions/*}/workflowTemplates:\x08template\xda\x41\x0fparent,template\x12\xf4\x01\n\x13GetWorkflowTemplate\x12\x34.google.cloud.dataproc.v1.GetWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"{\x82\xd3\xe4\x93\x02n\x12\x35/v1/{name=projects/*/locations/*/workflowTemplates/*}Z5\x12\x33/v1/{name=projects/*/regions/*/workflowTemplates/*}\xda\x41\x04name\x12\xd5\x02\n\x1bInstantiateWorkflowTemplate\x12<.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xd8\x01\x82\xd3\xe4\x93\x02\x8c\x01"A/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*ZD"?/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x0fname,parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xf4\x02\n!InstantiateInlineWorkflowTemplate\x12\x42.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xeb\x01\x82\xd3\xe4\x93\x02\xa6\x01"G/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZQ"E/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x0fparent,template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xa6\x02\n\x16UpdateWorkflowTemplate\x12\x37.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest\x1a*.google.cloud.dataproc.v1.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x94\x01\x1a>/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08templateZH\x1a/v1/{template.name=projects/*/locations/*/workflowTemplates/*}:\010templateZH\032/v1/{template.name=projects/*/
locations/*/workflowTemplates/*}:\010templateZH\032>> response = client.create_autoscaling_policy(parent, policy) Args: - parent (str): Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}``. - policy (Union[dict, ~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy]): The autoscaling policy to create. + parent (str): Required. The "resource name" of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.create``, the resource + name has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.create``, the resource + name has the following format: + ``projects/{project_id}/locations/{location}`` + policy (Union[dict, ~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy]): Required. The autoscaling policy to create. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dataproc_v1beta2.types.AutoscalingPolicy` @@ -385,8 +392,15 @@ def get_autoscaling_policy( Args: name (str): Required. The "resource name" of the autoscaling policy, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}``. + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.get``, the resource name + of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.get``, the resource name + of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will be retried using a default configuration. @@ -468,9 +482,16 @@ def list_autoscaling_policies( ... pass Args: - parent (str): Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The "resource name" of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.list``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.autoscalingPolicies.list``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -562,8 +583,15 @@ def delete_autoscaling_policy( Args: name (str): Required. The "resource name" of the autoscaling policy, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}``. + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.autoscalingPolicies.delete``, the resource + name of the policy has the following format: + ``projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`` + + - For ``projects.locations.autoscalingPolicies.delete``, the resource + name of the policy has the following format: + ``projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}`` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py index 1c422994b9f5..246638c9473b 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py +++ b/dataproc/google/cloud/dataproc_v1beta2/gapic/cluster_controller_client.py @@ -207,7 +207,9 @@ def create_cluster( metadata=None, ): """ - Creates a cluster in a project. + Creates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1beta2 @@ -310,7 +312,9 @@ def update_cluster( metadata=None, ): """ - Updates a cluster in a project. + Updates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1beta2 @@ -505,7 +509,9 @@ def delete_cluster( metadata=None, ): """ - Deletes a cluster in a project. + Deletes a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Example: >>> from google.cloud import dataproc_v1beta2 @@ -795,8 +801,11 @@ def diagnose_cluster( metadata=None, ): """ - Gets cluster diagnostic information. After the operation completes, the - Operation.response field contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. The returned ``Operation.metadata`` + will be + `ClusterOperationMetadata `__. + After the operation completes, ``Operation.response`` contains + `Empty `__. 
Example: >>> from google.cloud import dataproc_v1beta2 diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py index 72cee7d4b47b..b708113b7879 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py +++ b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/cluster_controller_grpc_transport.py @@ -119,7 +119,9 @@ def channel(self): def create_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.create_cluster`. - Creates a cluster in a project. + Creates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -132,7 +134,9 @@ def create_cluster(self): def update_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.update_cluster`. - Updates a cluster in a project. + Updates a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -145,7 +149,9 @@ def update_cluster(self): def delete_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.delete_cluster`. - Deletes a cluster in a project. + Deletes a cluster in a project. The returned ``Operation.metadata`` will + be + `ClusterOperationMetadata `__. Returns: Callable: A callable which accepts the appropriate @@ -184,8 +190,11 @@ def list_clusters(self): def diagnose_cluster(self): """Return the gRPC stub for :meth:`ClusterControllerClient.diagnose_cluster`. - Gets cluster diagnostic information. After the operation completes, the - Operation.response field contains ``DiagnoseClusterOutputLocation``. + Gets cluster diagnostic information. The returned ``Operation.metadata`` + will be + `ClusterOperationMetadata `__. 
+ After the operation completes, ``Operation.response`` contains + `Empty `__. Returns: Callable: A callable which accepts the appropriate diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py index 86bc98b33810..14398811aedf 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py +++ b/dataproc/google/cloud/dataproc_v1beta2/gapic/transports/workflow_template_service_grpc_transport.py @@ -160,7 +160,10 @@ def instantiate_workflow_template(self): cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. @@ -189,7 +192,10 @@ def instantiate_inline_workflow_template(self): cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. diff --git a/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py b/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py index f7ad32daf927..9e67cd3f6e35 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py +++ b/dataproc/google/cloud/dataproc_v1beta2/gapic/workflow_template_service_client.py @@ -243,9 +243,16 @@ def create_workflow_template( >>> response = client.create_workflow_template(parent, template) Args: - parent (str): Required. 
The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,create``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.create``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The Dataproc workflow template to create. If a dict is provided, it must be of the same form as the protobuf @@ -324,11 +331,18 @@ def get_workflow_template( >>> response = client.get_workflow_template(name) Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.get``, the resource name of + the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.get``, the resource name + of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to retrieve. Only previously - instatiated versions can be retrieved. + instantiated versions can be retrieved. If unspecified, retrieves the current version. 
retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -403,7 +417,10 @@ def instantiate_workflow_template( cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. @@ -426,9 +443,16 @@ def instantiate_workflow_template( >>> metadata = response.metadata() Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.instantiate``, the resource + name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the current version of the workflow template has the supplied version. @@ -534,7 +558,10 @@ def instantiate_inline_workflow_template( cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. - The ``Operation.metadata`` will be ``WorkflowMetadata``. + The ``Operation.metadata`` will be + `WorkflowMetadata `__. + Also see `Using + WorkflowMetadata `__. On successful completion, ``Operation.response`` will be ``Empty``. 
@@ -560,9 +587,16 @@ def instantiate_inline_workflow_template( >>> metadata = response.metadata() Args: - parent (str): Required. The "resource name" of the workflow template region, as - described in https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,instantiateinline``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.instantiateinline``, the + resource name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template (Union[dict, ~google.cloud.dataproc_v1beta2.types.WorkflowTemplate]): Required. The workflow template to instantiate. If a dict is provided, it must be of the same form as the protobuf @@ -751,9 +785,16 @@ def list_workflow_templates( ... pass Args: - parent (str): Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}`` + parent (str): Required. The resource name of the region or location, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates,list``, the resource name of + the region has the following format: + ``projects/{project_id}/regions/{region}`` + + - For ``projects.locations.workflowTemplates.list``, the resource name + of the location has the following format: + ``projects/{project_id}/locations/{location}`` page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. 
If page @@ -844,9 +885,16 @@ def delete_workflow_template( >>> client.delete_workflow_template(name) Args: - name (str): Required. The "resource name" of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the form - ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + name (str): Required. The resource name of the workflow template, as described in + https://cloud.google.com/apis/design/resource\_names. + + - For ``projects.regions.workflowTemplates.delete``, the resource name + of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{template_id}`` + + - For ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates/{template_id}`` version (int): Optional. The version of workflow template to delete. If specified, will only delete the template if the current server version matches specified version. 
diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto index 0c3efbd067ae..36d507c82638 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto @@ -18,12 +18,11 @@ syntax = "proto3"; package google.cloud.dataproc.v1beta2; import "google/api/annotations.proto"; -import "google/cloud/dataproc/v1beta2/clusters.proto"; -import "google/cloud/dataproc/v1beta2/jobs.proto"; -import "google/longrunning/operations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; option java_multiple_files = true; @@ -33,6 +32,9 @@ option java_package = "com.google.cloud.dataproc.v1beta2"; // The API interface for managing autoscaling policies in the // Google Cloud Dataproc API. service AutoscalingPolicyService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Creates new autoscaling policy. rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { option (google.api.http) = { @@ -43,6 +45,7 @@ service AutoscalingPolicyService { body: "policy" } }; + option (google.api.method_signature) = "parent,policy"; } // Updates (replaces) autoscaling policy. @@ -58,6 +61,7 @@ service AutoscalingPolicyService { body: "policy" } }; + option (google.api.method_signature) = "policy"; } // Retrieves autoscaling policy. 
@@ -68,6 +72,7 @@ service AutoscalingPolicyService { get: "/v1beta2/{name=projects/*/regions/*/autoscalingPolicies/*}" } }; + option (google.api.method_signature) = "name"; } // Lists autoscaling policies in the project. @@ -78,6 +83,7 @@ service AutoscalingPolicyService { get: "/v1beta2/{parent=projects/*/regions/*}/autoscalingPolicies" } }; + option (google.api.method_signature) = "parent"; } // Deletes an autoscaling policy. It is an error to delete an autoscaling @@ -89,22 +95,37 @@ service AutoscalingPolicyService { delete: "/v1beta2/{name=projects/*/regions/*/autoscalingPolicies/*}" } }; + option (google.api.method_signature) = "name"; } } // Describes an autoscaling policy for Dataproc cluster autoscaler. message AutoscalingPolicy { + option (google.api.resource) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" + pattern: "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}" + history: ORIGINALLY_SINGLE_PATTERN + }; + // Required. The policy id. // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). Cannot begin or end with underscore // or hyphen. Must consist of between 3 and 50 characters. - string id = 1; + string id = 1 [(google.api.field_behavior) = REQUIRED]; - // Output only. The "resource name" of the policy, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`. - string name = 2; + // Output only. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Required. Autoscaling algorithm for policy. oneof algorithm { @@ -112,22 +133,22 @@ message AutoscalingPolicy { } // Required. Describes how the autoscaler will operate for primary workers. - InstanceGroupAutoscalingPolicyConfig worker_config = 4; + InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. Describes how the autoscaler will operate for secondary workers. - InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5; + InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; } // Basic algorithm for autoscaling. message BasicAutoscalingAlgorithm { // Required. YARN autoscaling configuration. - BasicYarnAutoscalingConfig yarn_config = 1; + BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Duration between scaling events. A scaling period starts after // the update operation from the previous event has completed. // // Bounds: [2m, 1d]. Default: 2m. - google.protobuf.Duration cooldown_period = 2; + google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; } // Basic autoscaling configurations for YARN. @@ -138,7 +159,7 @@ message BasicYarnAutoscalingConfig { // downscaling operations. // // Bounds: [0s, 1d]. - google.protobuf.Duration graceful_decommission_timeout = 5; + google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; // Required. 
Fraction of average pending memory in the last cooldown period // for which to add workers. A scale-up factor of 1.0 will result in scaling @@ -147,7 +168,7 @@ message BasicYarnAutoscalingConfig { // magnitude of scaling up (less aggressive scaling). // // Bounds: [0.0, 1.0]. - double scale_up_factor = 1; + double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; // Required. Fraction of average pending memory in the last cooldown period // for which to remove workers. A scale-down factor of 1 will result in @@ -156,7 +177,7 @@ message BasicYarnAutoscalingConfig { // removing workers, which can be beneficial for autoscaling a single job. // // Bounds: [0.0, 1.0]. - double scale_down_factor = 2; + double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. Minimum scale-up threshold as a fraction of total cluster size // before scaling occurs. For example, in a 20-worker cluster, a threshold of @@ -165,7 +186,7 @@ message BasicYarnAutoscalingConfig { // on any recommended change. // // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_up_min_worker_fraction = 3; + double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Minimum scale-down threshold as a fraction of total cluster size // before scaling occurs. For example, in a 20-worker cluster, a threshold of @@ -174,7 +195,7 @@ message BasicYarnAutoscalingConfig { // on any recommended change. // // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_down_min_worker_fraction = 4; + double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; } // Configuration for the size bounds of an instance group, including its @@ -184,7 +205,7 @@ message InstanceGroupAutoscalingPolicyConfig { // // Primary workers - Bounds: [2, max_instances]. Default: 2. // Secondary workers - Bounds: [0, max_instances]. Default: 0. - int32 min_instances = 1; + int32 min_instances = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
Maximum number of instances for this group. Required for primary // workers. Note that by default, clusters will not use secondary workers. @@ -192,7 +213,7 @@ message InstanceGroupAutoscalingPolicyConfig { // // Primary workers - Bounds: [min_instances, ). Required. // Secondary workers - Bounds: [min_instances, ). Default: 0. - int32 max_instances = 2; + int32 max_instances = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. Weight for the instance group, which is used to determine the // fraction of total workers in the cluster from this instance group. @@ -212,32 +233,61 @@ message InstanceGroupAutoscalingPolicyConfig { // zero weight on the unset group. For example if weight is set only on // primary workers, the cluster will use primary workers only and no // secondary workers. - int32 weight = 3; + int32 weight = 3 [(google.api.field_behavior) = OPTIONAL]; } // A request to create an autoscaling policy. message CreateAutoscalingPolicyRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}`. - string parent = 1; - - // The autoscaling policy to create. - AutoscalingPolicy policy = 2; + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.create`, the resource name + // has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.create`, the resource name + // has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; + + // Required. The autoscaling policy to create. 
+ AutoscalingPolicy policy = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to fetch an autoscaling policy. message GetAutoscalingPolicyRequest { // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`. - string name = 1; + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; } // A request to update an autoscaling policy. message UpdateAutoscalingPolicyRequest { // Required. The updated autoscaling policy. - AutoscalingPolicy policy = 1; + AutoscalingPolicy policy = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; } // A request to delete an autoscaling policy. @@ -245,32 +295,57 @@ message UpdateAutoscalingPolicyRequest { // Autoscaling policies in use by one or more clusters will not be deleted. message DeleteAutoscalingPolicyRequest { // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}`. - string name = 1; + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; } // A request to list autoscaling policies in a project. message ListAutoscalingPoliciesRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.list`, the resource name + // of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.list`, the resource name + // of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; // Optional. The maximum number of results to return in each response. - int32 page_size = 2; + // Must be less than or equal to 1000. Defaults to 100. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The page token, returned by a previous call, to request the // next page of results. 
- string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // A response to a request to list autoscaling policies in a project. message ListAutoscalingPoliciesResponse { // Output only. Autoscaling policies list. - repeated AutoscalingPolicy policies = 1; + repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py index 880fe573d879..50c0c54dd6fd 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/autoscaling_policies_pb2.py @@ -16,18 +16,11 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.dataproc_v1beta2.proto import ( - clusters_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2, -) -from google.cloud.dataproc_v1beta2.proto import ( - jobs_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -38,16 +31,15 @@ 
"\n!com.google.cloud.dataproc.v1beta2B\030AutoscalingPoliciesProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n>google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x32google/cloud/dataproc_v1beta2/proto/clusters.proto\x1a.google/cloud/dataproc_v1beta2/proto/jobs.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xd1\x02\n\x11\x41utoscalingPolicy\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 \x01(\x0b\x32\x38.google.cloud.dataproc.v1beta2.BasicAutoscalingAlgorithmH\x00\x12Z\n\rworker_config\x18\x04 \x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfig\x12\x64\n\x17secondary_worker_config\x18\x05 \x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfigB\x0b\n\talgorithm"\x9f\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12N\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x39.google.cloud.dataproc.v1beta2.BasicYarnAutoscalingConfig\x12\x32\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xe0\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12@\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x17\n\x0fscale_up_factor\x18\x01 \x01(\x01\x12\x19\n\x11scale_down_factor\x18\x02 \x01(\x01\x12$\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x12&\n\x1escale_down_min_worker_fraction\x18\x04 \x01(\x01"d\n$InstanceGroupAutoscalingPolicyConfig\x12\x15\n\rmin_instances\x18\x01 \x01(\x05\x12\x15\n\rmax_instances\x18\x02 \x01(\x05\x12\x0e\n\x06weight\x18\x03 \x01(\x05"r\n\x1e\x43reateAutoscalingPolicyRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12@\n\x06policy\x18\x02 
\x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy"+\n\x1bGetAutoscalingPolicyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"b\n\x1eUpdateAutoscalingPolicyRequest\x12@\n\x06policy\x18\x01 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy".\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"W\n\x1eListAutoscalingPoliciesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"~\n\x1fListAutoscalingPoliciesResponse\x12\x42\n\x08policies\x18\x01 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x92\x0b\n\x18\x41utoscalingPolicyService\x12\xa0\x02\n\x17\x43reateAutoscalingPolicy\x12=.google.cloud.dataproc.v1beta2.CreateAutoscalingPolicyRequest\x1a\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01".google.cloud.dataproc.v1beta2.ListAutoscalingPoliciesResponse"\x82\x01\x82\xd3\xe4\x93\x02|\x12google/cloud/dataproc_v1beta2/proto/autoscaling_policies.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto"\xb9\x04\n\x11\x41utoscalingPolicy\x12\x0f\n\x02id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12S\n\x0f\x62\x61sic_algorithm\x18\x03 \x01(\x0b\x32\x38.google.cloud.dataproc.v1beta2.BasicAutoscalingAlgorithmH\x00\x12_\n\rworker_config\x18\x04 \x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x02\x12i\n\x17secondary_worker_config\x18\x05 
\x01(\x0b\x32\x43.google.cloud.dataproc.v1beta2.InstanceGroupAutoscalingPolicyConfigB\x03\xe0\x41\x01:\xd1\x01\xea\x41\xcd\x01\n)dataproc.googleapis.com/AutoscalingPolicy\x12Lprojects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}\x12Pprojects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy} \x01\x42\x0b\n\talgorithm"\xa9\x01\n\x19\x42\x61sicAutoscalingAlgorithm\x12S\n\x0byarn_config\x18\x01 \x01(\x0b\x32\x39.google.cloud.dataproc.v1beta2.BasicYarnAutoscalingConfigB\x03\xe0\x41\x02\x12\x37\n\x0f\x63ooldown_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\xf9\x01\n\x1a\x42\x61sicYarnAutoscalingConfig\x12\x45\n\x1dgraceful_decommission_timeout\x18\x05 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12\x1c\n\x0fscale_up_factor\x18\x01 \x01(\x01\x42\x03\xe0\x41\x02\x12\x1e\n\x11scale_down_factor\x18\x02 \x01(\x01\x42\x03\xe0\x41\x02\x12)\n\x1cscale_up_min_worker_fraction\x18\x03 \x01(\x01\x42\x03\xe0\x41\x01\x12+\n\x1escale_down_min_worker_fraction\x18\x04 \x01(\x01\x42\x03\xe0\x41\x01"s\n$InstanceGroupAutoscalingPolicyConfig\x12\x1a\n\rmin_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1a\n\rmax_instances\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x13\n\x06weight\x18\x03 \x01(\x05\x42\x03\xe0\x41\x01"\xaa\x01\n\x1e\x43reateAutoscalingPolicyRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x45\n\x06policy\x18\x02 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB\x03\xe0\x41\x02"^\n\x1bGetAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x95\x01\n\x1eUpdateAutoscalingPolicyRequest\x12s\n\x06policy\x18\x01 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"a\n\x1e\x44\x65leteAutoscalingPolicyRequest\x12?\n\x04name\x18\x01 
\x01(\tB1\xe0\x41\x02\xfa\x41+\n)dataproc.googleapis.com/AutoscalingPolicy"\x94\x01\n\x1eListAutoscalingPoliciesRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\x12)dataproc.googleapis.com/AutoscalingPolicy\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"\x88\x01\n\x1fListAutoscalingPoliciesResponse\x12G\n\x08policies\x18\x01 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicyB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03\x32\x8f\x0c\n\x18\x41utoscalingPolicyService\x12\xb0\x02\n\x17\x43reateAutoscalingPolicy\x12=.google.cloud.dataproc.v1beta2.CreateAutoscalingPolicyRequest\x1a\x30.google.cloud.dataproc.v1beta2.AutoscalingPolicy"\xa3\x01\x82\xd3\xe4\x93\x02\x8c\x01".google.cloud.dataproc.v1beta2.ListAutoscalingPoliciesResponse"\x8b\x01\x82\xd3\xe4\x93\x02|\x12 labels = 8; + map labels = 8 [(google.api.field_behavior) = OPTIONAL]; // Output only. Cluster status. - ClusterStatus status = 4; + ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The previous cluster status. - repeated ClusterStatus status_history = 7; + repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A cluster UUID (Unique Universal Identifier). Cloud Dataproc // generates this value when it creates the cluster. - string cluster_uuid = 6; + string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Contains cluster daemon metrics such as HDFS and YARN stats. // // **Beta Feature**: This report is available for testing purposes only. It // may be changed before final release. - ClusterMetrics metrics = 9; + ClusterMetrics metrics = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The cluster config. 
@@ -132,29 +170,29 @@ message ClusterConfig { // and manage this project-level, per-location bucket (see // [Cloud Dataproc staging // bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - string config_bucket = 1; + string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The shared Compute Engine config settings for // all instances in a cluster. - GceClusterConfig gce_cluster_config = 8; + GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // the master instance in a cluster. - InstanceGroupConfig master_config = 9; + InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // worker instances in a cluster. - InstanceGroupConfig worker_config = 10; + InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine config settings for // additional worker instances in a cluster. - InstanceGroupConfig secondary_worker_config = 12; + InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; // Optional. The config settings for software inside the cluster. - SoftwareConfig software_config = 13; + SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; // Optional. The config setting for auto delete cluster schedule. - LifecycleConfig lifecycle_config = 14; + LifecycleConfig lifecycle_config = 14 [(google.api.field_behavior) = OPTIONAL]; // Optional. Commands to execute on each node after config is // completed. By default, executables are run on master and all worker nodes. @@ -169,31 +207,31 @@ message ClusterConfig { // else // ... worker specific actions ... // fi - repeated NodeInitializationAction initialization_actions = 11; + repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
Encryption settings for the cluster. - EncryptionConfig encryption_config = 15; + EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; // Optional. Autoscaling config for the policy associated with the cluster. // Cluster does not autoscale if this field is unset. - AutoscalingConfig autoscaling_config = 16; + AutoscalingConfig autoscaling_config = 16 [(google.api.field_behavior) = OPTIONAL]; // Optional. Port/endpoint configuration for this cluster - EndpointConfig endpoint_config = 17; + EndpointConfig endpoint_config = 17 [(google.api.field_behavior) = OPTIONAL]; // Optional. Security related configuration. - SecurityConfig security_config = 18; + SecurityConfig security_config = 18 [(google.api.field_behavior) = OPTIONAL]; } // Endpoint config for this cluster message EndpointConfig { // Output only. The map of port descriptions to URLs. Will only be populated // if enable_http_port_access is true. - map http_ports = 1; + map http_ports = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. If true, enable http access to specific ports on the cluster // from external sources. Defaults to false. - bool enable_http_port_access = 2; + bool enable_http_port_access = 2 [(google.api.field_behavior) = OPTIONAL]; } // Autoscaling Policy config associated with the cluster. @@ -207,14 +245,14 @@ message AutoscalingConfig { // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` // // Note that the policy must be in the same project and Cloud Dataproc region. - string policy_uri = 1; + string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; } // Encryption settings for the cluster. message EncryptionConfig { // Optional. The Cloud KMS key name to use for PD disk encryption for all // instances in the cluster. 
- string gce_pd_kms_key_name = 1; + string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; } // Common config settings for resources of Compute Engine cluster @@ -231,7 +269,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` // * `projects/[project_id]/zones/[zone]` // * `us-central1-f` - string zone_uri = 1; + string zone_uri = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine network to be used for machine // communications. Cannot be specified with subnetwork_uri. If neither @@ -244,7 +282,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` // * `projects/[project_id]/regions/global/default` // * `default` - string network_uri = 2; + string network_uri = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine subnetwork to be used for machine // communications. Cannot be specified with network_uri. @@ -254,7 +292,7 @@ message GceClusterConfig { // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` // * `projects/[project_id]/regions/us-east1/subnetworks/sub0` // * `sub0` - string subnetwork_uri = 6; + string subnetwork_uri = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. If true, all instances in the cluster will only have internal IP // addresses. By default, clusters are not restricted to internal IP @@ -262,7 +300,7 @@ message GceClusterConfig { // instance. This `internal_ip_only` restriction can only be enabled for // subnetwork enabled networks, and all off-cluster dependencies must be // configured to be accessible without external IP addresses. - bool internal_ip_only = 7; + bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The service account of the instances. Defaults to the default // Compute Engine service account. 
Custom service accounts need @@ -275,7 +313,7 @@ message GceClusterConfig { // https://cloud.google.com/compute/docs/access/service-accounts#custom_service_accounts // for more information). // Example: `[account_id]@[project_id].iam.gserviceaccount.com` - string service_account = 8; + string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The URIs of service account scopes to be included in // Compute Engine instances. The following base set of scopes is always @@ -291,7 +329,7 @@ message GceClusterConfig { // * https://www.googleapis.com/auth/bigtable.admin.table // * https://www.googleapis.com/auth/bigtable.data // * https://www.googleapis.com/auth/devstorage.full_control - repeated string service_account_scopes = 3; + repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL]; // The Compute Engine tags to add to all instances (see // [Tagging instances](/compute/docs/label-or-tag-resources#tags)). @@ -303,24 +341,24 @@ message GceClusterConfig { map metadata = 5; // Optional. Reservation Affinity for consuming Zonal reservation. - ReservationAffinity reservation_affinity = 11; + ReservationAffinity reservation_affinity = 11 [(google.api.field_behavior) = OPTIONAL]; } -// Optional. The config settings for Compute Engine resources in +// The config settings for Compute Engine resources in // an instance group, such as a master or worker group. message InstanceGroupConfig { // Optional. The number of VM instances in the instance group. // For master instance groups, must be set to 1. - int32 num_instances = 1; + int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; // Output only. The list of instance names. Cloud Dataproc derives the names // from `cluster_name`, `num_instances`, and the instance group. - repeated string instance_names = 2; + repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine image resource used for cluster // instances. 
It can be specified or may be inferred from // `SoftwareConfig.image_version`. - string image_uri = 3; + string image_uri = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Compute Engine machine type used for cluster instances. // @@ -335,28 +373,25 @@ message InstanceGroupConfig { // Placement](/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) // feature, you must use the short name of the machine type // resource, for example, `n1-standard-2`. - string machine_type_uri = 4; + string machine_type_uri = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. Disk option config settings. - DiskConfig disk_config = 5; + DiskConfig disk_config = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. Specifies that this instance group contains preemptible // instances. - bool is_preemptible = 6; + bool is_preemptible = 6 [(google.api.field_behavior) = OPTIONAL]; // Output only. The config for Compute Engine Instance Group // Manager that manages this group. // This is only used for preemptible instance groups. - ManagedGroupConfig managed_group_config = 7; + ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The Compute Engine accelerator configuration for these // instances. - // - // **Beta Feature**: This feature is still under development. It may be - // changed before final release. - repeated AcceleratorConfig accelerators = 8; + repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Specifies the minimum cpu platform for the Instance Group. + // Specifies the minimum cpu platform for the Instance Group. // See [Cloud Dataproc→Minimum CPU Platform] // (/dataproc/docs/concepts/compute/dataproc-min-cpu). string min_cpu_platform = 9; @@ -366,10 +401,10 @@ message InstanceGroupConfig { message ManagedGroupConfig { // Output only. The name of the Instance Template used for the Managed // Instance Group. 
- string instance_template_name = 1; + string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the Instance Group Manager for this group. - string instance_group_manager_name = 2; + string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Specifies the type and number of accelerator cards attached to the instances @@ -401,12 +436,12 @@ message DiskConfig { // Optional. Type of the boot disk (default is "pd-standard"). // Valid values: "pd-ssd" (Persistent Disk Solid State Drive) or // "pd-standard" (Persistent Disk Hard Disk Drive). - string boot_disk_type = 3; + string boot_disk_type = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Size in GB of the boot disk (default is 500GB). - int32 boot_disk_size_gb = 1; + int32 boot_disk_size_gb = 1 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Number of attached SSDs, from 0 to 4 (default is 0). + // Number of attached SSDs, from 0 to 4 (default is 0). // If SSDs are not attached, the boot disk is used to store runtime logs and // [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. // If one or more SSDs are attached, this runtime bulk @@ -423,9 +458,9 @@ message LifecycleConfig { // // Example: **"10m"**, the minimum value, to delete the // cluster when it has had no jobs running for 10 minutes. - google.protobuf.Duration idle_delete_ttl = 1; + google.protobuf.Duration idle_delete_ttl = 1 [(google.api.field_behavior) = OPTIONAL]; - // Optional. Either the exact time the cluster should be deleted at or + // Either the exact time the cluster should be deleted at or // the cluster maximum age. oneof ttl { // Optional. The time when cluster will be auto-deleted. @@ -437,6 +472,10 @@ message LifecycleConfig { // Example: **"1d"**, to delete the cluster 1 day after its creation.. google.protobuf.Duration auto_delete_ttl = 3; } + + // Output only. 
The time when cluster became idle (most recent job finished) + // and became eligible for deletion due to idleness. + google.protobuf.Timestamp idle_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Security related configuration, including encryption, Kerberos, etc. @@ -448,79 +487,83 @@ message SecurityConfig { // Specifies Kerberos related configuration. message KerberosConfig { // Optional. Flag to indicate whether to Kerberize the cluster. - bool enable_kerberos = 1; + bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. The Cloud Storage URI of a KMS encrypted file containing the root // principal password. - string root_principal_password_uri = 2; + string root_principal_password_uri = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The uri of the KMS key used to encrypt various sensitive // files. - string kms_key_uri = 3; + string kms_key_uri = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The Cloud Storage URI of the keystore file used for SSL // encryption. If not provided, Dataproc will provide a self-signed // certificate. - string keystore_uri = 4; + string keystore_uri = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of the truststore file used for SSL // encryption. If not provided, Dataproc will provide a self-signed // certificate. - string truststore_uri = 5; + string truststore_uri = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // password to the user provided keystore. For the self-signed certificate, // this password is generated by Dataproc. - string keystore_password_uri = 6; + string keystore_password_uri = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // password to the user provided key. For the self-signed certificate, this // password is generated by Dataproc. 
- string key_password_uri = 7; + string key_password_uri = 7 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // password to the user provided truststore. For the self-signed certificate, // this password is generated by Dataproc. - string truststore_password_uri = 8; + string truststore_password_uri = 8 [(google.api.field_behavior) = OPTIONAL]; // Optional. The remote realm the Dataproc on-cluster KDC will trust, should // the user enable cross realm trust. - string cross_realm_trust_realm = 9; + string cross_realm_trust_realm = 9 [(google.api.field_behavior) = OPTIONAL]; // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross // realm trust relationship. - string cross_realm_trust_kdc = 10; + string cross_realm_trust_kdc = 10 [(google.api.field_behavior) = OPTIONAL]; // Optional. The admin server (IP or hostname) for the remote trusted realm in // a cross realm trust relationship. - string cross_realm_trust_admin_server = 11; + string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // shared password between the on-cluster Kerberos realm and the remote // trusted realm, in a cross realm trust relationship. - string cross_realm_trust_shared_password_uri = 12; + string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; // Optional. The Cloud Storage URI of a KMS encrypted file containing the // master key of the KDC database. - string kdc_db_key_uri = 13; + string kdc_db_key_uri = 13 [(google.api.field_behavior) = OPTIONAL]; // Optional. The lifetime of the ticket granting ticket, in hours. // If not specified, or user specifies 0, then default value 10 // will be used. - int32 tgt_lifetime_hours = 14; + int32 tgt_lifetime_hours = 14 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The name of the on-cluster Kerberos realm. 
+ // If not specified, the uppercased domain of hostnames will be the realm. + string realm = 15 [(google.api.field_behavior) = OPTIONAL]; } // Specifies an executable to run on a fully configured node and a // timeout period for executable completion. message NodeInitializationAction { // Required. Cloud Storage URI of executable file. - string executable_file = 1; + string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Amount of time executable has to complete. Default is // 10 minutes. Cluster creation fails with an explanatory error message (the // name of the executable that caused the error and the exceeded timeout // period) if the executable is not completed at end of the timeout period. - google.protobuf.Duration execution_timeout = 2; + google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; } // The status of a cluster and its instances. @@ -566,17 +609,17 @@ message ClusterStatus { } // Output only. The cluster's state. - State state = 1; + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Optional details of cluster's state. - string detail = 2; + string detail = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Time when this state was entered. - google.protobuf.Timestamp state_start_time = 3; + google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Additional state information that includes // status reported by the agent. - Substate substate = 4; + Substate substate = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Specifies the selection and config of software inside the cluster. @@ -588,7 +631,7 @@ message SoftwareConfig { // ["preview" // version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). // If unspecified, it defaults to the latest Debian version. - string image_version = 1; + string image_version = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. 
The properties to set on daemon config files. // @@ -608,7 +651,7 @@ message SoftwareConfig { // // For more information, see // [Cluster properties](/dataproc/docs/concepts/cluster-properties). - map properties = 2; + map properties = 2 [(google.api.field_behavior) = OPTIONAL]; // The set of optional components to activate on the cluster. repeated Component optional_components = 3; @@ -630,13 +673,13 @@ message ClusterMetrics { message CreateClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster to create. - Cluster cluster = 2; + Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [CreateClusterRequest][google.cloud.dataproc.v1beta2.CreateClusterRequest] requests with the same @@ -649,23 +692,23 @@ message CreateClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4; + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; } // A request to update a cluster. message UpdateClusterRequest { // Required. The ID of the Google Cloud Platform project the // cluster belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 5; + string region = 5 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The changes to the cluster. 
- Cluster cluster = 3; + Cluster cluster = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. Timeout for graceful YARN decomissioning. Graceful // decommissioning allows removing nodes from the cluster without @@ -675,7 +718,7 @@ message UpdateClusterRequest { // the maximum allowed timeout is 1 day. // // Only supported on Dataproc image versions 1.2 and higher. - google.protobuf.Duration graceful_decommission_timeout = 6; + google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; // Required. Specifies the path, relative to `Cluster`, of // the field to update. For example, to change the number of workers @@ -737,7 +780,7 @@ message UpdateClusterRequest { // autoscaling policies // // - google.protobuf.FieldMask update_mask = 4; + google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [UpdateClusterRequest][google.cloud.dataproc.v1beta2.UpdateClusterRequest] requests with the same @@ -750,24 +793,24 @@ message UpdateClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 7; + string request_id = 7 [(google.api.field_behavior) = OPTIONAL]; } // A request to delete a cluster. message DeleteClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. 
Specifying the `cluster_uuid` means the RPC should fail // (with error NOT_FOUND) if cluster with specified UUID does not exist. - string cluster_uuid = 4; + string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; // Optional. A unique id used to identify the request. If the server // receives two [DeleteClusterRequest][google.cloud.dataproc.v1beta2.DeleteClusterRequest] requests with the same @@ -780,32 +823,32 @@ message DeleteClusterRequest { // // The id must contain only letters (a-z, A-Z), numbers (0-9), // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5; + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; } // Request to get the resource representation for a cluster in a project. message GetClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to list the clusters in a project. message ListClustersRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 4; + string region = 4 [(google.api.field_behavior) = REQUIRED]; - // Optional. A filter constraining the clusters to list. Filters are + // Optional. A filter constraining the clusters to list. Filters are // case-sensitive and have the following syntax: // // field = value [AND [field = value]] ... 
@@ -824,37 +867,37 @@ message ListClustersRequest { // // status.state = ACTIVE AND clusterName = mycluster // AND labels.env = staging AND labels.starred = * - string filter = 5; + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; // Optional. The standard List page size. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. The standard List page token. - string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; } // The list of all clusters in a project. message ListClustersResponse { // Output only. The clusters in the project. - repeated Cluster clusters = 1; + repeated Cluster clusters = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. To fetch additional results, provide this value as the // `page_token` in a subsequent ListClustersRequest. - string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to collect cluster diagnostic information. message DiagnoseClusterRequest { // Required. The ID of the Google Cloud Platform project that the cluster // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The cluster name. - string cluster_name = 2; + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; } // The location of diagnostic output. @@ -862,7 +905,7 @@ message DiagnoseClusterResults { // Output only. The Cloud Storage URI of the diagnostic output. // The output report is a plain text file with a summary of collected // diagnostics. - string output_uri = 1; + string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Reservation Affinity for consuming Zonal reservation. 
@@ -883,11 +926,11 @@ message ReservationAffinity { } // Optional. Type of reservation to consume - Type consume_reservation_type = 1; + Type consume_reservation_type = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. Corresponds to the label key of reservation resource. - string key = 2; + string key = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. Corresponds to the label values of reservation resource. - repeated string values = 3; + repeated string values = 3 [(google.api.field_behavior) = OPTIONAL]; } diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py index bfe5208a5e7e..48f0feafeee9 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2.py @@ -17,6 +17,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.dataproc_v1beta2.proto import ( operations_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_operations__pb2, ) @@ -39,10 +41,12 @@ "\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n2google/cloud/dataproc_v1beta2/proto/clusters.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x34google/cloud/dataproc_v1beta2/proto/operations.proto\x1a\x30google/cloud/dataproc_v1beta2/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xbe\x03\n\x07\x43luster\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12<\n\x06\x63onfig\x18\x03 
\x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterConfig\x12\x42\n\x06labels\x18\x08 \x03(\x0b\x32\x32.google.cloud.dataproc.v1beta2.Cluster.LabelsEntry\x12<\n\x06status\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatus\x12\x44\n\x0estatus_history\x18\x07 \x03(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatus\x12\x14\n\x0c\x63luster_uuid\x18\x06 \x01(\t\x12>\n\x07metrics\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.ClusterMetrics\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf3\x06\n\rClusterConfig\x12\x15\n\rconfig_bucket\x18\x01 \x01(\t\x12K\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.GceClusterConfig\x12I\n\rmaster_config\x18\t \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfig\x12I\n\rworker_config\x18\n \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfig\x12S\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfig\x12\x46\n\x0fsoftware_config\x18\r \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SoftwareConfig\x12H\n\x10lifecycle_config\x18\x0e \x01(\x0b\x32..google.cloud.dataproc.v1beta2.LifecycleConfig\x12W\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.NodeInitializationAction\x12J\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.EncryptionConfig\x12L\n\x12\x61utoscaling_config\x18\x10 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingConfig\x12\x46\n\x0f\x65ndpoint_config\x18\x11 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.EndpointConfig\x12\x46\n\x0fsecurity_config\x18\x12 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SecurityConfig"\xb5\x01\n\x0e\x45ndpointConfig\x12P\n\nhttp_ports\x18\x01 \x03(\x0b\x32<.google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntry\x12\x1f\n\x17\x65nable_http_port_access\x18\x02 \x01(\x08\x1a\x30\n\x0eHttpPortsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\'\n\x11\x41utoscalingConfig\x12\x12\n\npolicy_uri\x18\x01 \x01(\t"/\n\x10\x45ncryptionConfig\x12\x1b\n\x13gce_pd_kms_key_name\x18\x01 \x01(\t"\x86\x03\n\x10GceClusterConfig\x12\x10\n\x08zone_uri\x18\x01 \x01(\t\x12\x13\n\x0bnetwork_uri\x18\x02 \x01(\t\x12\x16\n\x0esubnetwork_uri\x18\x06 \x01(\t\x12\x18\n\x10internal_ip_only\x18\x07 \x01(\x08\x12\x17\n\x0fservice_account\x18\x08 \x01(\t\x12\x1e\n\x16service_account_scopes\x18\x03 \x03(\t\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12O\n\x08metadata\x18\x05 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry\x12P\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ReservationAffinity\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xfc\x02\n\x13InstanceGroupConfig\x12\x15\n\rnum_instances\x18\x01 \x01(\x05\x12\x16\n\x0einstance_names\x18\x02 \x03(\t\x12\x11\n\timage_uri\x18\x03 \x01(\t\x12\x18\n\x10machine_type_uri\x18\x04 \x01(\t\x12>\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.DiskConfig\x12\x16\n\x0eis_preemptible\x18\x06 \x01(\x08\x12O\n\x14managed_group_config\x18\x07 \x01(\x0b\x32\x31.google.cloud.dataproc.v1beta2.ManagedGroupConfig\x12\x46\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AcceleratorConfig\x12\x18\n\x10min_cpu_platform\x18\t \x01(\t"Y\n\x12ManagedGroupConfig\x12\x1e\n\x16instance_template_name\x18\x01 \x01(\t\x12#\n\x1binstance_group_manager_name\x18\x02 \x01(\t"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"W\n\nDiskConfig\x12\x16\n\x0e\x62oot_disk_type\x18\x03 \x01(\t\x12\x19\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"\xba\x01\n\x0fLifecycleConfig\x12\x32\n\x0fidle_delete_ttl\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12\x36\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x34\n\x0f\x61uto_delete_ttl\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x42\x05\n\x03ttl"X\n\x0eSecurityConfig\x12\x46\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.KerberosConfig"\xb6\x03\n\x0eKerberosConfig\x12\x17\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x12#\n\x1broot_principal_password_uri\x18\x02 \x01(\t\x12\x13\n\x0bkms_key_uri\x18\x03 \x01(\t\x12\x14\n\x0ckeystore_uri\x18\x04 \x01(\t\x12\x16\n\x0etruststore_uri\x18\x05 \x01(\t\x12\x1d\n\x15keystore_password_uri\x18\x06 \x01(\t\x12\x18\n\x10key_password_uri\x18\x07 \x01(\t\x12\x1f\n\x17truststore_password_uri\x18\x08 \x01(\t\x12\x1f\n\x17\x63ross_realm_trust_realm\x18\t \x01(\t\x12\x1d\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\t\x12&\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\t\x12-\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\t\x12\x16\n\x0ekdc_db_key_uri\x18\r \x01(\t\x12\x1a\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05"i\n\x18NodeInitializationAction\x12\x17\n\x0f\x65xecutable_file\x18\x01 \x01(\t\x12\x34\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xf7\x02\n\rClusterStatus\x12\x41\n\x05state\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.ClusterStatus.State\x12\x0e\n\x06\x64\x65tail\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12G\n\x08substate\x18\x04 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.ClusterStatus.Substate"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xf4\x01\n\x0eSoftwareConfig\x12\x15\n\rimage_version\x18\x01 \x01(\t\x12Q\n\nproperties\x18\x02 
\x03(\x0b\x32=.google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntry\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32(.google.cloud.dataproc.v1beta2.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x02\n\x0e\x43lusterMetrics\x12T\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry\x12T\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x87\x01\n\x14\x43reateClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x37\n\x07\x63luster\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.Cluster\x12\x12\n\nrequest_id\x18\x04 \x01(\t"\x90\x02\n\x14UpdateClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x05 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x37\n\x07\x63luster\x18\x03 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.Cluster\x12@\n\x1dgraceful_decommission_timeout\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x12\n\nrequest_id\x18\x07 \x01(\t"z\n\x14\x44\x65leteClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x04 \x01(\t\x12\x12\n\nrequest_id\x18\x05 \x01(\t"M\n\x11GetClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t"p\n\x13ListClustersRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x11\n\tpage_size\x18\x02 
\x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"i\n\x14ListClustersResponse\x12\x38\n\x08\x63lusters\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1beta2.Cluster\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"R\n\x16\x44iagnoseClusterRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x02 \x01(\t",\n\x16\x44iagnoseClusterResults\x12\x12\n\noutput_uri\x18\x01 \x01(\t"\xee\x01\n\x13ReservationAffinity\x12Y\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x37.google.cloud.dataproc.v1beta2.ReservationAffinity.Type\x12\x0b\n\x03key\x18\x02 \x01(\t\x12\x0e\n\x06values\x18\x03 \x03(\t"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xf8\x08\n\x11\x43lusterController\x12\xae\x01\n\rCreateCluster\x12\x33.google.cloud.dataproc.v1beta2.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"I\x82\xd3\xe4\x93\x02\x43"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\x12\xbd\x01\n\rUpdateCluster\x12\x33.google.cloud.dataproc.v1beta2.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"X\x82\xd3\xe4\x93\x02R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\x12\xb4\x01\n\rDeleteCluster\x12\x33.google.cloud.dataproc.v1beta2.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"O\x82\xd3\xe4\x93\x02I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\x12\xb7\x01\n\nGetCluster\x12\x30.google.cloud.dataproc.v1beta2.GetClusterRequest\x1a&.google.cloud.dataproc.v1beta2.Cluster"O\x82\xd3\xe4\x93\x02I\x12G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\x12\xb9\x01\n\x0cListClusters\x12\x32.google.cloud.dataproc.v1beta2.ListClustersRequest\x1a\x33.google.cloud.dataproc.v1beta2.ListClustersResponse"@\x82\xd3\xe4\x93\x02:\x12\x38/v1beta2/projects/{project_id}/regions/{region}/clusters\x12
\xc4\x01\n\x0f\x44iagnoseCluster\x12\x35.google.cloud.dataproc.v1beta2.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"[\x82\xd3\xe4\x93\x02U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*B{\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n2google/cloud/dataproc_v1beta2/proto/clusters.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x34google/cloud/dataproc_v1beta2/proto/operations.proto\x1a\x30google/cloud/dataproc_v1beta2/proto/shared.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe6\x03\n\x07\x43luster\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x06\x63onfig\x18\x03 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterConfigB\x03\xe0\x41\x02\x12G\n\x06labels\x18\x08 \x03(\x0b\x32\x32.google.cloud.dataproc.v1beta2.Cluster.LabelsEntryB\x03\xe0\x41\x01\x12\x41\n\x06status\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12I\n\x0estatus_history\x18\x07 \x03(\x0b\x32,.google.cloud.dataproc.v1beta2.ClusterStatusB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x06 \x01(\tB\x03\xe0\x41\x03\x12\x43\n\x07metrics\x18\t \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.ClusterMetricsB\x03\xe0\x41\x03\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xaf\x07\n\rClusterConfig\x12\x1a\n\rconfig_bucket\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12P\n\x12gce_cluster_config\x18\x08 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.GceClusterConfigB\x03\xe0\x41\x01\x12N\n\rmaster_config\x18\t 
\x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12N\n\rworker_config\x18\n \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12X\n\x17secondary_worker_config\x18\x0c \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.InstanceGroupConfigB\x03\xe0\x41\x01\x12K\n\x0fsoftware_config\x18\r \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SoftwareConfigB\x03\xe0\x41\x01\x12M\n\x10lifecycle_config\x18\x0e \x01(\x0b\x32..google.cloud.dataproc.v1beta2.LifecycleConfigB\x03\xe0\x41\x01\x12\\\n\x16initialization_actions\x18\x0b \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.NodeInitializationActionB\x03\xe0\x41\x01\x12O\n\x11\x65ncryption_config\x18\x0f \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.EncryptionConfigB\x03\xe0\x41\x01\x12Q\n\x12\x61utoscaling_config\x18\x10 \x01(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AutoscalingConfigB\x03\xe0\x41\x01\x12K\n\x0f\x65ndpoint_config\x18\x11 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.EndpointConfigB\x03\xe0\x41\x01\x12K\n\x0fsecurity_config\x18\x12 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.SecurityConfigB\x03\xe0\x41\x01"\xbf\x01\n\x0e\x45ndpointConfig\x12U\n\nhttp_ports\x18\x01 \x03(\x0b\x32<.google.cloud.dataproc.v1beta2.EndpointConfig.HttpPortsEntryB\x03\xe0\x41\x03\x12$\n\x17\x65nable_http_port_access\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01\x1a\x30\n\x0eHttpPortsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01",\n\x11\x41utoscalingConfig\x12\x17\n\npolicy_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01"4\n\x10\x45ncryptionConfig\x12 \n\x13gce_pd_kms_key_name\x18\x01 \x01(\tB\x03\xe0\x41\x01"\xa9\x03\n\x10GceClusterConfig\x12\x15\n\x08zone_uri\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0bnetwork_uri\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0esubnetwork_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10internal_ip_only\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12\x1c\n\x0fservice_account\x18\x08 
\x01(\tB\x03\xe0\x41\x01\x12#\n\x16service_account_scopes\x18\x03 \x03(\tB\x03\xe0\x41\x01\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12O\n\x08metadata\x18\x05 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.GceClusterConfig.MetadataEntry\x12U\n\x14reservation_affinity\x18\x0b \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ReservationAffinityB\x03\xe0\x41\x01\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x03\n\x13InstanceGroupConfig\x12\x1a\n\rnum_instances\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1b\n\x0einstance_names\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x16\n\timage_uri\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10machine_type_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x43\n\x0b\x64isk_config\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.DiskConfigB\x03\xe0\x41\x01\x12\x1b\n\x0eis_preemptible\x18\x06 \x01(\x08\x42\x03\xe0\x41\x01\x12T\n\x14managed_group_config\x18\x07 \x01(\x0b\x32\x31.google.cloud.dataproc.v1beta2.ManagedGroupConfigB\x03\xe0\x41\x03\x12K\n\x0c\x61\x63\x63\x65lerators\x18\x08 \x03(\x0b\x32\x30.google.cloud.dataproc.v1beta2.AcceleratorConfigB\x03\xe0\x41\x01\x12\x18\n\x10min_cpu_platform\x18\t \x01(\t"c\n\x12ManagedGroupConfig\x12#\n\x16instance_template_name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12(\n\x1binstance_group_manager_name\x18\x02 \x01(\tB\x03\xe0\x41\x03"L\n\x11\x41\x63\x63\x65leratorConfig\x12\x1c\n\x14\x61\x63\x63\x65lerator_type_uri\x18\x01 \x01(\t\x12\x19\n\x11\x61\x63\x63\x65lerator_count\x18\x02 \x01(\x05"a\n\nDiskConfig\x12\x1b\n\x0e\x62oot_disk_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x1e\n\x11\x62oot_disk_size_gb\x18\x01 \x01(\x05\x42\x03\xe0\x41\x01\x12\x16\n\x0enum_local_ssds\x18\x02 \x01(\x05"\xf9\x01\n\x0fLifecycleConfig\x12\x37\n\x0fidle_delete_ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x36\n\x10\x61uto_delete_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x34\n\x0f\x61uto_delete_ttl\x18\x03 
\x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x12\x38\n\x0fidle_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x42\x05\n\x03ttl"X\n\x0eSecurityConfig\x12\x46\n\x0fkerberos_config\x18\x01 \x01(\x0b\x32-.google.cloud.dataproc.v1beta2.KerberosConfig"\x90\x04\n\x0eKerberosConfig\x12\x1c\n\x0f\x65nable_kerberos\x18\x01 \x01(\x08\x42\x03\xe0\x41\x01\x12(\n\x1broot_principal_password_uri\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bkms_key_uri\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0ckeystore_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0etruststore_uri\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12"\n\x15keystore_password_uri\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x1d\n\x10key_password_uri\x18\x07 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17truststore_password_uri\x18\x08 \x01(\tB\x03\xe0\x41\x01\x12$\n\x17\x63ross_realm_trust_realm\x18\t \x01(\tB\x03\xe0\x41\x01\x12"\n\x15\x63ross_realm_trust_kdc\x18\n \x01(\tB\x03\xe0\x41\x01\x12+\n\x1e\x63ross_realm_trust_admin_server\x18\x0b \x01(\tB\x03\xe0\x41\x01\x12\x32\n%cross_realm_trust_shared_password_uri\x18\x0c \x01(\tB\x03\xe0\x41\x01\x12\x1b\n\x0ekdc_db_key_uri\x18\r \x01(\tB\x03\xe0\x41\x01\x12\x1f\n\x12tgt_lifetime_hours\x18\x0e \x01(\x05\x42\x03\xe0\x41\x01\x12\x12\n\x05realm\x18\x0f \x01(\tB\x03\xe0\x41\x01"s\n\x18NodeInitializationAction\x12\x1c\n\x0f\x65xecutable_file\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x11\x65xecution_timeout\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"\x8b\x03\n\rClusterStatus\x12\x46\n\x05state\x18\x01 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.ClusterStatus.StateB\x03\xe0\x41\x03\x12\x13\n\x06\x64\x65tail\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x39\n\x10state_start_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12L\n\x08substate\x18\x04 
\x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.ClusterStatus.SubstateB\x03\xe0\x41\x03"V\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\t\n\x05\x45RROR\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\x0c\n\x08UPDATING\x10\x05"<\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tUNHEALTHY\x10\x01\x12\x10\n\x0cSTALE_STATUS\x10\x02"\xfe\x01\n\x0eSoftwareConfig\x12\x1a\n\rimage_version\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12V\n\nproperties\x18\x02 \x03(\x0b\x32=.google.cloud.dataproc.v1beta2.SoftwareConfig.PropertiesEntryB\x03\xe0\x41\x01\x12\x45\n\x13optional_components\x18\x03 \x03(\x0e\x32(.google.cloud.dataproc.v1beta2.Component\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa4\x02\n\x0e\x43lusterMetrics\x12T\n\x0chdfs_metrics\x18\x01 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.HdfsMetricsEntry\x12T\n\x0cyarn_metrics\x18\x02 \x03(\x0b\x32>.google.cloud.dataproc.v1beta2.ClusterMetrics.YarnMetricsEntry\x1a\x32\n\x10HdfsMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01\x1a\x32\n\x10YarnMetricsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x03:\x02\x38\x01"\x9b\x01\n\x14\x43reateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x02 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x04 \x01(\tB\x03\xe0\x41\x01"\xb3\x02\n\x14UpdateClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12<\n\x07\x63luster\x18\x03 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x02\x12\x45\n\x1dgraceful_decommission_timeout\x18\x06 
\x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01\x12\x34\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02\x12\x17\n\nrequest_id\x18\x07 \x01(\tB\x03\xe0\x41\x01"\x93\x01\n\x14\x44\x65leteClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_uuid\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nrequest_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\\\n\x11GetClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x89\x01\n\x13ListClustersRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x04 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06\x66ilter\x18\x05 \x01(\tB\x03\xe0\x41\x01\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01"s\n\x14ListClustersResponse\x12=\n\x08\x63lusters\x18\x01 \x03(\x0b\x32&.google.cloud.dataproc.v1beta2.ClusterB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"a\n\x16\x44iagnoseClusterRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x19\n\x0c\x63luster_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"1\n\x16\x44iagnoseClusterResults\x12\x17\n\noutput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x03"\xfd\x01\n\x13ReservationAffinity\x12^\n\x18\x63onsume_reservation_type\x18\x01 \x01(\x0e\x32\x37.google.cloud.dataproc.v1beta2.ReservationAffinity.TypeB\x03\xe0\x41\x01\x12\x10\n\x03key\x18\x02 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06values\x18\x03 
\x03(\tB\x03\xe0\x41\x01"_\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x12\n\x0eNO_RESERVATION\x10\x01\x12\x13\n\x0f\x41NY_RESERVATION\x10\x02\x12\x18\n\x14SPECIFIC_RESERVATION\x10\x03\x32\xe7\r\n\x11\x43lusterController\x12\x91\x02\n\rCreateCluster\x12\x33.google.cloud.dataproc.v1beta2.CreateClusterRequest\x1a\x1d.google.longrunning.Operation"\xab\x01\x82\xd3\xe4\x93\x02\x43"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\x07\x63luster\xda\x41\x1bproject_id, region, cluster\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xbb\x02\n\rUpdateCluster\x12\x33.google.cloud.dataproc.v1beta2.UpdateClusterRequest\x1a\x1d.google.longrunning.Operation"\xd5\x01\x82\xd3\xe4\x93\x02R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\x07\x63luster\xda\x41\x36project_id, region, cluster_name, cluster, update_mask\xca\x41\x41\n\x07\x43luster\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xaa\x02\n\rDeleteCluster\x12\x33.google.cloud.dataproc.v1beta2.DeleteClusterRequest\x1a\x1d.google.longrunning.Operation"\xc4\x01\x82\xd3\xe4\x93\x02I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x12\xda\x01\n\nGetCluster\x12\x30.google.cloud.dataproc.v1beta2.GetClusterRequest\x1a&.google.cloud.dataproc.v1beta2.Cluster"r\x82\xd3\xe4\x93\x02I\x12G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\xda\x41 project_id, region, cluster_name\x12\xeb\x01\n\x0cListClusters\x12\x32.google.cloud.dataproc.v1beta2.ListClustersRequest\x1a\x33.google.cloud.dataproc.v1beta2.ListClustersResponse"r\x82\xd3\xe4\x93\x02:\x12\x38/v1beta2/projects/{project_id}/regions/{region}/clusters\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, 
filter\x12\xba\x02\n\x0f\x44iagnoseCluster\x12\x35.google.cloud.dataproc.v1beta2.DiagnoseClusterRequest\x1a\x1d.google.longrunning.Operation"\xd0\x01\x82\xd3\xe4\x93\x02U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\x01*\xda\x41 project_id, region, cluster_name\xca\x41O\n\x15google.protobuf.Empty\x12\x36google.cloud.dataproc.v1beta2.ClusterOperationMetadata\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB{\n!com.google.cloud.dataproc.v1beta2B\rClustersProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_operations__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_shared__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, @@ -80,8 +84,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4053, - serialized_end=4139, + serialized_start=4509, + serialized_end=4595, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_STATE) @@ -103,8 +107,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4141, - serialized_end=4201, + serialized_start=4597, + serialized_end=4657, ) _sym_db.RegisterEnumDescriptor(_CLUSTERSTATUS_SUBSTATE) @@ -141,8 +145,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5856, - serialized_end=5951, + serialized_start=6489, + serialized_end=6584, ) _sym_db.RegisterEnumDescriptor(_RESERVATIONAFFINITY_TYPE) @@ -199,8 +203,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=757, - serialized_end=802, + serialized_start=855, + serialized_end=900, ) _CLUSTER = _descriptor.Descriptor( @@ -225,7 +229,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -243,7 +247,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -261,7 +265,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -279,7 +283,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -297,7 +301,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -315,7 +319,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -333,7 +337,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -351,7 +355,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -363,8 +367,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=356, - serialized_end=802, + serialized_start=414, + serialized_end=900, ) @@ -390,7 +394,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -408,7 +412,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -426,7 +430,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -444,7 +448,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -462,7 +466,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -480,7 +484,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -498,7 +502,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -516,7 +520,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -534,7 +538,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -552,7 +556,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -570,7 +574,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -588,7 +592,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -600,8 +604,8 
@@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=805, - serialized_end=1688, + serialized_start=903, + serialized_end=1846, ) @@ -657,8 +661,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1824, - serialized_end=1872, + serialized_start=1992, + serialized_end=2040, ) _ENDPOINTCONFIG = _descriptor.Descriptor( @@ -683,7 +687,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -701,7 +705,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -713,8 +717,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1691, - serialized_end=1872, + serialized_start=1849, + serialized_end=2040, ) @@ -740,7 +744,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -752,8 +756,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1874, - serialized_end=1913, + serialized_start=2042, + serialized_end=2086, ) @@ -779,7 +783,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -791,8 +795,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1915, - serialized_end=1962, + serialized_start=2088, + serialized_end=2140, ) @@ -848,8 +852,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2308, - serialized_end=2355, + serialized_start=2521, + serialized_end=2568, ) _GCECLUSTERCONFIG = _descriptor.Descriptor( @@ -874,7 +878,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -892,7 +896,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -910,7 +914,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -928,7 +932,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -946,7 +950,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -964,7 +968,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1018,7 +1022,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1030,8 +1034,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1965, - serialized_end=2355, + serialized_start=2143, + serialized_end=2568, ) @@ -1057,7 +1061,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1075,7 +1079,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1093,7 +1097,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1111,7 +1115,7 @@ containing_type=None, is_extension=False, extension_scope=None, 
- serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1129,7 +1133,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1147,7 +1151,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1165,7 +1169,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1183,7 +1187,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1213,8 +1217,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2358, - serialized_end=2738, + serialized_start=2571, + serialized_end=2991, ) @@ -1240,7 +1244,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1258,7 +1262,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1270,8 +1274,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2740, - serialized_end=2829, + serialized_start=2993, + serialized_end=3092, ) @@ -1327,8 +1331,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2831, - serialized_end=2907, + serialized_start=3094, + serialized_end=3170, ) @@ -1354,7 +1358,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ 
-1372,7 +1376,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1402,8 +1406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2909, - serialized_end=2996, + serialized_start=3172, + serialized_end=3269, ) @@ -1429,7 +1433,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1468,6 +1472,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="idle_start_time", + full_name="google.cloud.dataproc.v1beta2.LifecycleConfig.idle_start_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -1485,8 +1507,8 @@ fields=[], ) ], - serialized_start=2999, - serialized_end=3185, + serialized_start=3272, + serialized_end=3521, ) @@ -1524,8 +1546,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3187, - serialized_end=3275, + serialized_start=3523, + serialized_end=3611, ) @@ -1551,7 +1573,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1569,7 +1591,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1587,7 +1609,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1605,7 
+1627,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1623,7 +1645,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1641,7 +1663,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1659,7 +1681,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1677,7 +1699,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1695,7 +1717,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1713,7 +1735,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1731,7 +1753,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1749,7 +1771,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1767,7 +1789,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1785,7 
+1807,25 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="realm", + full_name="google.cloud.dataproc.v1beta2.KerberosConfig.realm", + index=14, + number=15, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1797,8 +1837,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3278, - serialized_end=3716, + serialized_start=3614, + serialized_end=4142, ) @@ -1824,7 +1864,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1842,7 +1882,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1854,8 +1894,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3718, - serialized_end=3823, + serialized_start=4144, + serialized_end=4259, ) @@ -1881,7 +1921,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1899,7 +1939,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1917,7 +1957,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1935,7 +1975,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, 
+ serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1947,8 +1987,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3826, - serialized_end=4201, + serialized_start=4262, + serialized_end=4657, ) @@ -2004,8 +2044,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4399, - serialized_end=4448, + serialized_start=4865, + serialized_end=4914, ) _SOFTWARECONFIG = _descriptor.Descriptor( @@ -2030,7 +2070,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2048,7 +2088,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2078,8 +2118,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4204, - serialized_end=4448, + serialized_start=4660, + serialized_end=4914, ) @@ -2135,8 +2175,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4641, - serialized_end=4691, + serialized_start=5107, + serialized_end=5157, ) _CLUSTERMETRICS_YARNMETRICSENTRY = _descriptor.Descriptor( @@ -2191,8 +2231,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4693, - serialized_end=4743, + serialized_start=5159, + serialized_end=5209, ) _CLUSTERMETRICS = _descriptor.Descriptor( @@ -2247,8 +2287,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4451, - serialized_end=4743, + serialized_start=4917, + serialized_end=5209, ) @@ -2274,7 +2314,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2292,7 +2332,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -2310,7 +2350,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2328,7 +2368,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2340,8 +2380,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4746, - serialized_end=4881, + serialized_start=5212, + serialized_end=5367, ) @@ -2367,7 +2407,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2385,7 +2425,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2403,7 +2443,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2421,7 +2461,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2439,7 +2479,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2457,7 +2497,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2475,7 +2515,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2487,8 +2527,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=4884, - serialized_end=5156, + serialized_start=5370, + serialized_end=5677, ) @@ -2514,7 +2554,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2532,7 +2572,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2550,7 +2590,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2568,7 +2608,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2586,7 +2626,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2598,8 +2638,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5158, - serialized_end=5280, + serialized_start=5680, + serialized_end=5827, ) @@ -2625,7 +2665,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2643,7 +2683,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2661,7 +2701,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2673,8 +2713,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5282, - serialized_end=5359, + serialized_start=5829, + 
serialized_end=5921, ) @@ -2700,7 +2740,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2718,7 +2758,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2736,7 +2776,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2754,7 +2794,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2772,7 +2812,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2784,8 +2824,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5361, - serialized_end=5473, + serialized_start=5924, + serialized_end=6061, ) @@ -2811,7 +2851,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2829,7 +2869,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2841,8 +2881,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5475, - serialized_end=5580, + serialized_start=6063, + serialized_end=6178, ) @@ -2868,7 +2908,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2886,7 +2926,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2904,7 +2944,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2916,8 +2956,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5582, - serialized_end=5664, + serialized_start=6180, + serialized_end=6277, ) @@ -2943,7 +2983,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ) ], @@ -2955,8 +2995,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5666, - serialized_end=5710, + serialized_start=6279, + serialized_end=6328, ) @@ -2982,7 +3022,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3000,7 +3040,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3018,7 +3058,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -3030,8 +3070,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5713, - serialized_end=5951, + serialized_start=6331, + serialized_end=6584, ) _CLUSTER_LABELSENTRY.containing_type = _CLUSTER @@ -3080,6 +3120,9 @@ _LIFECYCLECONFIG.fields_by_name[ "auto_delete_ttl" ].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_LIFECYCLECONFIG.fields_by_name[ + "idle_start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LIFECYCLECONFIG.oneofs_by_name["ttl"].fields.append( _LIFECYCLECONFIG.fields_by_name["auto_delete_time"] ) @@ -3458,8 +3501,8 @@ dict( 
DESCRIPTOR=_INSTANCEGROUPCONFIG, __module__="google.cloud.dataproc_v1beta2.proto.clusters_pb2", - __doc__="""Optional. The config settings for Compute Engine resources in an - instance group, such as a master or worker group. + __doc__="""The config settings for Compute Engine resources in an instance group, + such as a master or worker group. Attributes: @@ -3497,11 +3540,10 @@ preemptible instance groups. accelerators: Optional. The Compute Engine accelerator configuration for - these instances. **Beta Feature**: This feature is still - under development. It may be changed before final release. + these instances. min_cpu_platform: - Optional. Specifies the minimum cpu platform for the Instance - Group. See [Cloud Dataproc→Minimum CPU Platform] + Specifies the minimum cpu platform for the Instance Group. See + [Cloud Dataproc→Minimum CPU Platform] (/dataproc/docs/concepts/compute/dataproc-min-cpu). """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.InstanceGroupConfig) @@ -3584,13 +3626,12 @@ boot_disk_size_gb: Optional. Size in GB of the boot disk (default is 500GB). num_local_ssds: - Optional. Number of attached SSDs, from 0 to 4 (default is 0). - If SSDs are not attached, the boot disk is used to store - runtime logs and `HDFS `__ data. If one or more SSDs are - attached, this runtime bulk data is spread across them, and - the boot disk contains only basic config and installed - binaries. + Number of attached SSDs, from 0 to 4 (default is 0). If SSDs + are not attached, the boot disk is used to store runtime logs + and `HDFS `__ data. If one or more SSDs are attached, this + runtime bulk data is spread across them, and the boot disk + contains only basic config and installed binaries. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.DiskConfig) ), @@ -3614,8 +3655,8 @@ value, to delete the cluster when it has had no jobs running for 10 minutes. ttl: - Optional. 
Either the exact time the cluster should be deleted - at or the cluster maximum age. + Either the exact time the cluster should be deleted at or the + cluster maximum age. auto_delete_time: Optional. The time when cluster will be auto-deleted. auto_delete_ttl: @@ -3623,6 +3664,10 @@ be auto-deleted at the end of this period. Valid range: **[10m, 14d]**. Example: **"1d"**, to delete the cluster 1 day after its creation.. + idle_start_time: + Output only. The time when cluster became idle (most recent + job finished) and became eligible for deletion due to + idleness. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.LifecycleConfig) ), @@ -3709,6 +3754,10 @@ Optional. The lifetime of the ticket granting ticket, in hours. If not specified, or user specifies 0, then default value 10 will be used. + realm: + Optional. The name of the on-cluster Kerberos realm. If not + specified, the uppercased domain of hostnames will be the + realm. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.KerberosConfig) ), @@ -4186,20 +4235,123 @@ DESCRIPTOR._options = None _CLUSTER_LABELSENTRY._options = None +_CLUSTER.fields_by_name["project_id"]._options = None +_CLUSTER.fields_by_name["cluster_name"]._options = None +_CLUSTER.fields_by_name["config"]._options = None +_CLUSTER.fields_by_name["labels"]._options = None +_CLUSTER.fields_by_name["status"]._options = None +_CLUSTER.fields_by_name["status_history"]._options = None +_CLUSTER.fields_by_name["cluster_uuid"]._options = None +_CLUSTER.fields_by_name["metrics"]._options = None +_CLUSTERCONFIG.fields_by_name["config_bucket"]._options = None +_CLUSTERCONFIG.fields_by_name["gce_cluster_config"]._options = None +_CLUSTERCONFIG.fields_by_name["master_config"]._options = None +_CLUSTERCONFIG.fields_by_name["worker_config"]._options = None +_CLUSTERCONFIG.fields_by_name["secondary_worker_config"]._options = None +_CLUSTERCONFIG.fields_by_name["software_config"]._options = None 
+_CLUSTERCONFIG.fields_by_name["lifecycle_config"]._options = None +_CLUSTERCONFIG.fields_by_name["initialization_actions"]._options = None +_CLUSTERCONFIG.fields_by_name["encryption_config"]._options = None +_CLUSTERCONFIG.fields_by_name["autoscaling_config"]._options = None +_CLUSTERCONFIG.fields_by_name["endpoint_config"]._options = None +_CLUSTERCONFIG.fields_by_name["security_config"]._options = None _ENDPOINTCONFIG_HTTPPORTSENTRY._options = None +_ENDPOINTCONFIG.fields_by_name["http_ports"]._options = None +_ENDPOINTCONFIG.fields_by_name["enable_http_port_access"]._options = None +_AUTOSCALINGCONFIG.fields_by_name["policy_uri"]._options = None +_ENCRYPTIONCONFIG.fields_by_name["gce_pd_kms_key_name"]._options = None _GCECLUSTERCONFIG_METADATAENTRY._options = None +_GCECLUSTERCONFIG.fields_by_name["zone_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["network_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["subnetwork_uri"]._options = None +_GCECLUSTERCONFIG.fields_by_name["internal_ip_only"]._options = None +_GCECLUSTERCONFIG.fields_by_name["service_account"]._options = None +_GCECLUSTERCONFIG.fields_by_name["service_account_scopes"]._options = None +_GCECLUSTERCONFIG.fields_by_name["reservation_affinity"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["num_instances"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["instance_names"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["image_uri"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["machine_type_uri"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["disk_config"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["is_preemptible"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["managed_group_config"]._options = None +_INSTANCEGROUPCONFIG.fields_by_name["accelerators"]._options = None +_MANAGEDGROUPCONFIG.fields_by_name["instance_template_name"]._options = None +_MANAGEDGROUPCONFIG.fields_by_name["instance_group_manager_name"]._options = None 
+_DISKCONFIG.fields_by_name["boot_disk_type"]._options = None +_DISKCONFIG.fields_by_name["boot_disk_size_gb"]._options = None +_LIFECYCLECONFIG.fields_by_name["idle_delete_ttl"]._options = None +_LIFECYCLECONFIG.fields_by_name["idle_start_time"]._options = None +_KERBEROSCONFIG.fields_by_name["enable_kerberos"]._options = None +_KERBEROSCONFIG.fields_by_name["root_principal_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["kms_key_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["keystore_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["truststore_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["keystore_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["key_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["truststore_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_realm"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_kdc"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_admin_server"]._options = None +_KERBEROSCONFIG.fields_by_name["cross_realm_trust_shared_password_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["kdc_db_key_uri"]._options = None +_KERBEROSCONFIG.fields_by_name["tgt_lifetime_hours"]._options = None +_KERBEROSCONFIG.fields_by_name["realm"]._options = None +_NODEINITIALIZATIONACTION.fields_by_name["executable_file"]._options = None +_NODEINITIALIZATIONACTION.fields_by_name["execution_timeout"]._options = None +_CLUSTERSTATUS.fields_by_name["state"]._options = None +_CLUSTERSTATUS.fields_by_name["detail"]._options = None +_CLUSTERSTATUS.fields_by_name["state_start_time"]._options = None +_CLUSTERSTATUS.fields_by_name["substate"]._options = None _SOFTWARECONFIG_PROPERTIESENTRY._options = None +_SOFTWARECONFIG.fields_by_name["image_version"]._options = None +_SOFTWARECONFIG.fields_by_name["properties"]._options = None _CLUSTERMETRICS_HDFSMETRICSENTRY._options = None _CLUSTERMETRICS_YARNMETRICSENTRY._options = None 
+_CREATECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["region"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["cluster"]._options = None +_CREATECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["region"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["cluster"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["graceful_decommission_timeout"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["update_mask"]._options = None +_UPDATECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["region"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["cluster_uuid"]._options = None +_DELETECLUSTERREQUEST.fields_by_name["request_id"]._options = None +_GETCLUSTERREQUEST.fields_by_name["project_id"]._options = None +_GETCLUSTERREQUEST.fields_by_name["region"]._options = None +_GETCLUSTERREQUEST.fields_by_name["cluster_name"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["project_id"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["region"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["filter"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["page_size"]._options = None +_LISTCLUSTERSREQUEST.fields_by_name["page_token"]._options = None +_LISTCLUSTERSRESPONSE.fields_by_name["clusters"]._options = None +_LISTCLUSTERSRESPONSE.fields_by_name["next_page_token"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["project_id"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["region"]._options = None +_DIAGNOSECLUSTERREQUEST.fields_by_name["cluster_name"]._options = None 
+_DIAGNOSECLUSTERRESULTS.fields_by_name["output_uri"]._options = None +_RESERVATIONAFFINITY.fields_by_name["consume_reservation_type"]._options = None +_RESERVATIONAFFINITY.fields_by_name["key"]._options = None +_RESERVATIONAFFINITY.fields_by_name["values"]._options = None _CLUSTERCONTROLLER = _descriptor.ServiceDescriptor( name="ClusterController", full_name="google.cloud.dataproc.v1beta2.ClusterController", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=5954, - serialized_end=7098, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=6587, + serialized_end=8354, methods=[ _descriptor.MethodDescriptor( name="CreateCluster", @@ -4209,7 +4361,7 @@ input_type=_CREATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002C"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\007cluster' + '\202\323\344\223\002C"8/v1beta2/projects/{project_id}/regions/{region}/clusters:\007cluster\332A\033project_id, region, cluster\312AA\n\007Cluster\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata' ), ), _descriptor.MethodDescriptor( @@ -4220,7 +4372,7 @@ input_type=_UPDATECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\002R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster" + "\202\323\344\223\002R2G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:\007cluster\332A6project_id, region, cluster_name, cluster, update_mask\312AA\n\007Cluster\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( @@ -4231,7 +4383,7 @@ input_type=_DELETECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - 
"\202\323\344\223\002I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + "\202\323\344\223\002I*G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A project_id, region, cluster_name\312AO\n\025google.protobuf.Empty\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata" ), ), _descriptor.MethodDescriptor( @@ -4242,7 +4394,7 @@ input_type=_GETCLUSTERREQUEST, output_type=_CLUSTER, serialized_options=_b( - "\202\323\344\223\002I\022G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + "\202\323\344\223\002I\022G/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}\332A project_id, region, cluster_name" ), ), _descriptor.MethodDescriptor( @@ -4253,7 +4405,7 @@ input_type=_LISTCLUSTERSREQUEST, output_type=_LISTCLUSTERSRESPONSE, serialized_options=_b( - "\202\323\344\223\002:\0228/v1beta2/projects/{project_id}/regions/{region}/clusters" + "\202\323\344\223\002:\0228/v1beta2/projects/{project_id}/regions/{region}/clusters\332A\022project_id, region\332A\032project_id, region, filter" ), ), _descriptor.MethodDescriptor( @@ -4264,7 +4416,7 @@ input_type=_DIAGNOSECLUSTERREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*' + '\202\323\344\223\002U"P/v1beta2/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose:\001*\332A project_id, region, cluster_name\312AO\n\025google.protobuf.Empty\0226google.cloud.dataproc.v1beta2.ClusterOperationMetadata' ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py index aa8da2e86652..de9821404290 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/clusters_pb2_grpc.py @@ -58,21 +58,27 @@ class 
ClusterControllerServicer(object): """ def CreateCluster(self, request, context): - """Creates a cluster in a project. + """Creates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateCluster(self, request, context): - """Updates a cluster in a project. + """Updates a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteCluster(self, request, context): - """Deletes a cluster in a project. + """Deletes a cluster in a project. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -93,9 +99,13 @@ def ListClusters(self, request, context): raise NotImplementedError("Method not implemented!") def DiagnoseCluster(self, request, context): - """Gets cluster diagnostic information. - After the operation completes, the Operation.response field - contains `DiagnoseClusterOutputLocation`. + """Gets cluster diagnostic information. The returned + [Operation.metadata][google.longrunning.Operation.metadata] will be + [ClusterOperationMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#clusteroperationmetadata). 
+ After the operation completes, + [Operation.response][google.longrunning.Operation.response] + contains + [Empty](google.protobuf.Empty). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto index 4d888dafc011..c1e643c92fd1 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.dataproc.v1beta2; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; @@ -29,12 +31,16 @@ option java_package = "com.google.cloud.dataproc.v1beta2"; // The JobController provides methods to manage jobs. service JobController { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Submits a job to a cluster. rpc SubmitJob(SubmitJobRequest) returns (Job) { option (google.api.http) = { post: "/v1beta2/projects/{project_id}/regions/{region}/jobs:submit" body: "*" }; + option (google.api.method_signature) = "project_id, region, job"; } // Gets the resource representation for a job in a project. @@ -42,6 +48,7 @@ service JobController { option (google.api.http) = { get: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.method_signature) = "project_id, region, job_id"; } // Lists regions/{region}/jobs in a project. 
@@ -49,6 +56,8 @@ service JobController { option (google.api.http) = { get: "/v1beta2/projects/{project_id}/regions/{region}/jobs" }; + option (google.api.method_signature) = "project_id, region"; + option (google.api.method_signature) = "project_id, region, filter"; } // Updates a job in a project. @@ -69,6 +78,7 @@ service JobController { post: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" body: "*" }; + option (google.api.method_signature) = "project_id, region, job_id"; } // Deletes the job from the project. If the job is active, the delete fails, @@ -77,6 +87,7 @@ service JobController { option (google.api.http) = { delete: "/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" }; + option (google.api.method_signature) = "project_id, region, job_id"; } } @@ -176,12 +187,12 @@ message HadoopJob { // A Cloud Dataproc job for running [Apache Spark](http://spark.apache.org/) // applications on YARN. +// The specification of the main method to call to drive the job. +// Specify either the jar file that contains the main class or the main class +// name. To pass both a main jar and a main class in that jar, add the jar to +// `CommonJob.jar_file_uris`, and then specify the main class name in +// `main_class`. message SparkJob { - // Required. The specification of the main method to call to drive the job. - // Specify either the jar file that contains the main class or the main class - // name. To pass both a main jar and a main class in that jar, add the jar to - // `CommonJob.jar_file_uris`, and then specify the main class name in - // `main_class`. oneof driver { // The HCFS URI of the jar file that contains the main class. string main_jar_file_uri = 1; @@ -226,7 +237,7 @@ message SparkJob { message PySparkJob { // Required. The HCFS URI of the main Python file to use as the driver. Must // be a .py file. - string main_python_file_uri = 1; + string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. 
The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may @@ -275,7 +286,7 @@ message QueryList { // ] // } // } - repeated string queries = 1; + repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; } // A Cloud Dataproc job for running [Apache Hive](https://hive.apache.org/) @@ -383,7 +394,7 @@ message PigJob { message SparkRJob { // Required. The HCFS URI of the main R file to use as the driver. // Must be a .R file. - string main_r_file_uri = 1; + string main_r_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The arguments to pass to the driver. Do not include arguments, // such as `--conf`, that can be set as job properties, since a collision may @@ -412,7 +423,7 @@ message SparkRJob { // Cloud Dataproc job config. message JobPlacement { // Required. The name of the cluster where the job will be submitted. - string cluster_name = 1; + string cluster_name = 1 [(google.api.field_behavior) = REQUIRED]; // Output only. A cluster UUID generated by the Cloud Dataproc service when // the job is submitted. @@ -503,7 +514,7 @@ message JobStatus { message JobReference { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The job ID, which must be unique within the project. // @@ -551,20 +562,20 @@ message YarnApplication { KILLED = 8; } - // Required. The application name. - string name = 1; + // Output only. The application name. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Required. The application state. - State state = 2; + // Output only. The application state. + State state = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Required. The numerical progress of the application, from 1 to 100. - float progress = 3; + // Output only. The numerical progress of the application, from 1 to 100. 
+ float progress = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or + // Optional. Output only. The HTTP URL of the ApplicationMaster, HistoryServer, or // TimelineServer that provides application-specific information. The URL uses // the internal hostname, and requires a proxy server for resolution and, // possibly, access. - string tracking_url = 4; + string tracking_url = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A Cloud Dataproc job resource. @@ -577,7 +588,7 @@ message Job { // Required. Job information, including how, when, and where to // run the job. - JobPlacement placement = 2; + JobPlacement placement = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The application/framework-specific portion of the job. oneof type_job { @@ -665,13 +676,13 @@ message JobScheduling { message SubmitJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job resource. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; // Optional. A unique id used to identify the request. If the server // receives two [SubmitJobRequest][google.cloud.dataproc.v1beta2.SubmitJobRequest] requests with the same @@ -691,13 +702,13 @@ message SubmitJobRequest { message GetJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. 
- string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to list jobs in a project. @@ -717,10 +728,10 @@ message ListJobsRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 6; + string region = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. The number of results to return in each response. int32 page_size = 2; @@ -760,16 +771,16 @@ message ListJobsRequest { message UpdateJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 2; + string region = 2 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 3; + string job_id = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The changes to the job. - Job job = 4; + Job job = 4 [(google.api.field_behavior) = REQUIRED]; // Required. Specifies the path, relative to Job, of // the field to update. For example, to update the labels of a Job the @@ -777,7 +788,7 @@ message UpdateJobRequest { // labels, and the `PATCH` request body would specify the new // value. Note: Currently, labels is the only // field that can be updated. - google.protobuf.FieldMask update_mask = 5; + google.protobuf.FieldMask update_mask = 5 [(google.api.field_behavior) = REQUIRED]; } // A list of jobs in a project. @@ -795,24 +806,24 @@ message ListJobsResponse { message CancelJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. 
The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to delete a job. message DeleteJobRequest { // Required. The ID of the Google Cloud Platform project that the job // belongs to. - string project_id = 1; + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The Cloud Dataproc region in which to handle the request. - string region = 3; + string region = 3 [(google.api.field_behavior) = REQUIRED]; // Required. The job ID. - string job_id = 2; + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; } diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py index 6379fd2c8bbc..b25037c0cddf 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/jobs_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -29,10 +31,12 @@ "\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n.google/cloud/dataproc_v1beta2/proto/jobs.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x02\n\rLoggingConfig\x12\\\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32\x41.google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry\x1aj\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xdd\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdb\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xda\x02\n\nPySparkJob\x12\x1c\n\x14main_python_file_uri\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 
\x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x39.google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\tQueryList\x12\x0f\n\x07queries\x18\x01 \x03(\t"\xb0\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32;.google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry\x12N\n\nproperties\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 \x03(\t\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf3\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xa2\x02\n\tSparkRJob\x12\x17\n\x0fmain_r_file_uri\x18\x01 \x01(\t\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x11\n\tfile_uris\x18\x03 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x04 \x03(\t\x12L\n\nproperties\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x0cJobPlacement\x12\x14\n\x0c\x63luster_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xcc\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32..google.cloud.dataproc.v1beta2.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x08substate\x18\x07 
\x01(\x0e\x32\x31.google.cloud.dataproc.v1beta2.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"2\n\x0cJobReference\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x96\x02\n\x0fYarnApplication\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x43\n\x05state\x18\x02 \x01(\x0e\x32\x34.google.cloud.dataproc.v1beta2.YarnApplication.State\x12\x10\n\x08progress\x18\x03 \x01(\x02\x12\x14\n\x0ctracking_url\x18\x04 \x01(\t"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xb3\x08\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobReference\x12>\n\tplacement\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobPlacement\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x07 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.SparkRJobH\x00\x12\x43\n\rspark_sql_job\x18\x0c 
\x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32..google.cloud.dataproc.v1beta2.YarnApplication\x12\x14\n\x0csubmitted_by\x18\n \x01(\t\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12>\n\x06labels\x18\x12 \x03(\x0b\x32..google.cloud.dataproc.v1beta2.Job.LabelsEntry\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"{\n\x10SubmitJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12/\n\x03job\x18\x02 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x12\n\nrequest_id\x18\x04 \x01(\t"C\n\rGetJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\x95\x02\n\x0fListJobsRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x06 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32>.google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 \x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xa8\x01\n\x10UpdateJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x02 \x01(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12/\n\x03job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12/\n\x0bupdate_mask\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"]\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"F\n\x10\x43\x61ncelJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t"F\n\x10\x44\x65leteJobRequest\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t2\x8b\x08\n\rJobController\x12\xa8\x01\n\tSubmitJob\x12/.google.cloud.dataproc.v1beta2.SubmitJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"F\x82\xd3\xe4\x93\x02@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\x01*\x12\xa1\x01\n\x06GetJob\x12,.google.cloud.dataproc.v1beta2.GetJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"E\x82\xd3\xe4\x93\x02?\x12=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\x12\xa9\x01\n\x08ListJobs\x12..google.cloud.dataproc.v1beta2.ListJobsRequest\x1a/.google.cloud.dataproc.v1beta2.ListJobsResponse"<\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta2/projects/{project_id}/regions/{region}/jobs\x12\xac\x01\n\tUpdateJob\x12/.google.cloud.dataproc.v1beta2.UpdateJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"J\x82\xd3\xe4\x93\x02\x44\x32=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xb1\x01\n\tCancelJob\x12/.google.cloud.dataproc.v1beta2.CancelJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"O\x82\xd3\xe4\x93\x02I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\x12\x9b\x01\n\tDeleteJob\x12/.google.cloud.dataproc.v1beta2.DeleteJobRequest\x1a\x16.google.protobuf.Empty"E\x82\xd3\xe4\x93\x02?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}Bw\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + 
'\n.google/cloud/dataproc_v1beta2/proto/jobs.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xcb\x02\n\rLoggingConfig\x12\\\n\x11\x64river_log_levels\x18\x02 \x03(\x0b\x32\x41.google.cloud.dataproc.v1beta2.LoggingConfig.DriverLogLevelsEntry\x1aj\n\x14\x44riverLogLevelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x41\n\x05value\x18\x02 \x01(\x0e\x32\x32.google.cloud.dataproc.v1beta2.LoggingConfig.Level:\x02\x38\x01"p\n\x05Level\x12\x15\n\x11LEVEL_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\t\n\x05TRACE\x10\x02\x12\t\n\x05\x44\x45\x42UG\x10\x03\x12\x08\n\x04INFO\x10\x04\x12\x08\n\x04WARN\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\t\n\x05\x46\x41TAL\x10\x07\x12\x07\n\x03OFF\x10\x08"\xdd\x02\n\tHadoopJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12L\n\nproperties\x18\x07 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.HadoopJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdb\x02\n\x08SparkJob\x12\x1b\n\x11main_jar_file_uri\x18\x01 \x01(\tH\x00\x12\x14\n\nmain_class\x18\x02 \x01(\tH\x00\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12K\n\nproperties\x18\x07 \x03(\x0b\x32\x37.google.cloud.dataproc.v1beta2.SparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 
\x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06\x64river"\xdf\x02\n\nPySparkJob\x12!\n\x14main_python_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x18\n\x10python_file_uris\x18\x03 \x03(\t\x12\x15\n\rjar_file_uris\x18\x04 \x03(\t\x12\x11\n\tfile_uris\x18\x05 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x06 \x03(\t\x12M\n\nproperties\x18\x07 \x03(\x0b\x32\x39.google.cloud.dataproc.v1beta2.PySparkJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x08 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"!\n\tQueryList\x12\x14\n\x07queries\x18\x01 \x03(\tB\x03\xe0\x41\x02"\xb0\x03\n\x07HiveJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12U\n\x10script_variables\x18\x04 \x03(\x0b\x32;.google.cloud.dataproc.v1beta2.HiveJob.ScriptVariablesEntry\x12J\n\nproperties\x18\x05 \x03(\x0b\x32\x36.google.cloud.dataproc.v1beta2.HiveJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xe5\x03\n\x0bSparkSqlJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12Y\n\x10script_variables\x18\x03 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.SparkSqlJob.ScriptVariablesEntry\x12N\n\nproperties\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.SparkSqlJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x38 
\x03(\t\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xf3\x03\n\x06PigJob\x12\x18\n\x0equery_file_uri\x18\x01 \x01(\tH\x00\x12>\n\nquery_list\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.QueryListH\x00\x12\x1b\n\x13\x63ontinue_on_failure\x18\x03 \x01(\x08\x12T\n\x10script_variables\x18\x04 \x03(\x0b\x32:.google.cloud.dataproc.v1beta2.PigJob.ScriptVariablesEntry\x12I\n\nproperties\x18\x05 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.PigJob.PropertiesEntry\x12\x15\n\rjar_file_uris\x18\x06 \x03(\t\x12\x44\n\x0elogging_config\x18\x07 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x36\n\x14ScriptVariablesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07queries"\xa7\x02\n\tSparkRJob\x12\x1c\n\x0fmain_r_file_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x61rgs\x18\x02 \x03(\t\x12\x11\n\tfile_uris\x18\x03 \x03(\t\x12\x14\n\x0c\x61rchive_uris\x18\x04 \x03(\t\x12L\n\nproperties\x18\x05 \x03(\x0b\x32\x38.google.cloud.dataproc.v1beta2.SparkRJob.PropertiesEntry\x12\x44\n\x0elogging_config\x18\x06 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.LoggingConfig\x1a\x31\n\x0fPropertiesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"?\n\x0cJobPlacement\x12\x19\n\x0c\x63luster_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x0c\x63luster_uuid\x18\x02 \x01(\t"\xcc\x03\n\tJobStatus\x12=\n\x05state\x18\x01 \x01(\x0e\x32..google.cloud.dataproc.v1beta2.JobStatus.State\x12\x0f\n\x07\x64\x65tails\x18\x02 \x01(\t\x12\x34\n\x10state_start_time\x18\x06 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x43\n\x08substate\x18\x07 \x01(\x0e\x32\x31.google.cloud.dataproc.v1beta2.JobStatus.Substate"\xa9\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0e\n\nSETUP_DONE\x10\x08\x12\x0b\n\x07RUNNING\x10\x02\x12\x12\n\x0e\x43\x41NCEL_PENDING\x10\x03\x12\x12\n\x0e\x43\x41NCEL_STARTED\x10\x07\x12\r\n\tCANCELLED\x10\x04\x12\x08\n\x04\x44ONE\x10\x05\x12\t\n\x05\x45RROR\x10\x06\x12\x13\n\x0f\x41TTEMPT_FAILURE\x10\t"H\n\x08Substate\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\r\n\tSUBMITTED\x10\x01\x12\n\n\x06QUEUED\x10\x02\x12\x10\n\x0cSTALE_STATUS\x10\x03"7\n\x0cJobReference\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0e\n\x06job_id\x18\x02 \x01(\t"\xaa\x02\n\x0fYarnApplication\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x34.google.cloud.dataproc.v1beta2.YarnApplication.StateB\x03\xe0\x41\x03\x12\x15\n\x08progress\x18\x03 \x01(\x02\x42\x03\xe0\x41\x03\x12\x19\n\x0ctracking_url\x18\x04 \x01(\tB\x03\xe0\x41\x03"\x87\x01\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x07\n\x03NEW\x10\x01\x12\x0e\n\nNEW_SAVING\x10\x02\x12\r\n\tSUBMITTED\x10\x03\x12\x0c\n\x08\x41\x43\x43\x45PTED\x10\x04\x12\x0b\n\x07RUNNING\x10\x05\x12\x0c\n\x08\x46INISHED\x10\x06\x12\n\n\x06\x46\x41ILED\x10\x07\x12\n\n\x06KILLED\x10\x08"\xb8\x08\n\x03Job\x12>\n\treference\x18\x01 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobReference\x12\x43\n\tplacement\x18\x02 \x01(\x0b\x32+.google.cloud.dataproc.v1beta2.JobPlacementB\x03\xe0\x41\x02\x12>\n\nhadoop_job\x18\x03 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x04 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x05 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x06 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x07 
\x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12?\n\x0bspark_r_job\x18\x15 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.SparkRJobH\x00\x12\x43\n\rspark_sql_job\x18\x0c \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x38\n\x06status\x18\x08 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12@\n\x0estatus_history\x18\r \x03(\x0b\x32(.google.cloud.dataproc.v1beta2.JobStatus\x12I\n\x11yarn_applications\x18\t \x03(\x0b\x32..google.cloud.dataproc.v1beta2.YarnApplication\x12\x14\n\x0csubmitted_by\x18\n \x01(\t\x12"\n\x1a\x64river_output_resource_uri\x18\x11 \x01(\t\x12 \n\x18\x64river_control_files_uri\x18\x0f \x01(\t\x12>\n\x06labels\x18\x12 \x03(\x0b\x32..google.cloud.dataproc.v1beta2.Job.LabelsEntry\x12@\n\nscheduling\x18\x14 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x10\n\x08job_uuid\x18\x16 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08type_job".\n\rJobScheduling\x12\x1d\n\x15max_failures_per_hour\x18\x01 \x01(\x05"\x8a\x01\n\x10SubmitJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x02 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x12\n\nrequest_id\x18\x04 \x01(\t"R\n\rGetJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x9f\x02\n\x0fListJobsRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x06 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x14\n\x0c\x63luster_name\x18\x04 \x01(\t\x12Y\n\x11job_state_matcher\x18\x05 \x01(\x0e\x32>.google.cloud.dataproc.v1beta2.ListJobsRequest.JobStateMatcher\x12\x0e\n\x06\x66ilter\x18\x07 
\x01(\t"6\n\x0fJobStateMatcher\x12\x07\n\x03\x41LL\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0e\n\nNON_ACTIVE\x10\x02"\xc1\x01\n\x10UpdateJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x34\n\x03job\x18\x04 \x01(\x0b\x32".google.cloud.dataproc.v1beta2.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"]\n\x10ListJobsResponse\x12\x30\n\x04jobs\x18\x01 \x03(\x0b\x32".google.cloud.dataproc.v1beta2.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x10\x43\x61ncelJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02"U\n\x10\x44\x65leteJobRequest\x12\x17\n\nproject_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06region\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x13\n\x06job_id\x18\x02 \x01(\tB\x03\xe0\x41\x02\x32\xfb\t\n\rJobController\x12\xc2\x01\n\tSubmitJob\x12/.google.cloud.dataproc.v1beta2.SubmitJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"`\x82\xd3\xe4\x93\x02@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\x01*\xda\x41\x17project_id, region, job\x12\xbe\x01\n\x06GetJob\x12,.google.cloud.dataproc.v1beta2.GetJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"b\x82\xd3\xe4\x93\x02?\x12=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x12\xdb\x01\n\x08ListJobs\x12..google.cloud.dataproc.v1beta2.ListJobsRequest\x1a/.google.cloud.dataproc.v1beta2.ListJobsResponse"n\x82\xd3\xe4\x93\x02\x36\x12\x34/v1beta2/projects/{project_id}/regions/{region}/jobs\xda\x41\x12project_id, region\xda\x41\x1aproject_id, region, 
filter\x12\xac\x01\n\tUpdateJob\x12/.google.cloud.dataproc.v1beta2.UpdateJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"J\x82\xd3\xe4\x93\x02\x44\x32=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:\x03job\x12\xce\x01\n\tCancelJob\x12/.google.cloud.dataproc.v1beta2.CancelJobRequest\x1a".google.cloud.dataproc.v1beta2.Job"l\x82\xd3\xe4\x93\x02I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\x01*\xda\x41\x1aproject_id, region, job_id\x12\xb8\x01\n\tDeleteJob\x12/.google.cloud.dataproc.v1beta2.DeleteJobRequest\x1a\x16.google.protobuf.Empty"b\x82\xd3\xe4\x93\x02?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\xda\x41\x1aproject_id, region, job_id\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBw\n!com.google.cloud.dataproc.v1beta2B\tJobsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -80,8 +84,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=427, - serialized_end=539, + serialized_start=485, + serialized_end=597, ) _sym_db.RegisterEnumDescriptor(_LOGGINGCONFIG_LEVEL) @@ -132,8 +136,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3618, - serialized_end=3787, + serialized_start=3696, + serialized_end=3865, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_STATE) @@ -158,8 +162,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3789, - serialized_end=3861, + serialized_start=3867, + serialized_end=3939, ) _sym_db.RegisterEnumDescriptor(_JOBSTATUS_SUBSTATE) @@ -203,8 +207,8 @@ ], containing_type=None, serialized_options=None, - 
serialized_start=4059, - serialized_end=4194, + serialized_start=4162, + serialized_end=4297, ) _sym_db.RegisterEnumDescriptor(_YARNAPPLICATION_STATE) @@ -226,8 +230,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=5740, - serialized_end=5794, + serialized_start=5889, + serialized_end=5943, ) _sym_db.RegisterEnumDescriptor(_LISTJOBSREQUEST_JOBSTATEMATCHER) @@ -284,8 +288,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=319, - serialized_end=425, + serialized_start=377, + serialized_end=483, ) _LOGGINGCONFIG = _descriptor.Descriptor( @@ -322,8 +326,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=208, - serialized_end=539, + serialized_start=266, + serialized_end=597, ) @@ -379,8 +383,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _HADOOPJOB = _descriptor.Descriptor( @@ -551,8 +555,8 @@ fields=[], ) ], - serialized_start=542, - serialized_end=891, + serialized_start=600, + serialized_end=949, ) @@ -608,8 +612,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _SPARKJOB = _descriptor.Descriptor( @@ -780,8 +784,8 @@ fields=[], ) ], - serialized_start=894, - serialized_end=1241, + serialized_start=952, + serialized_end=1299, ) @@ -837,8 +841,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _PYSPARKJOB = _descriptor.Descriptor( @@ -863,7 +867,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1001,8 +1005,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1244, - serialized_end=1590, + serialized_start=1302, + serialized_end=1653, ) @@ -1028,7 +1032,7 
@@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -1040,8 +1044,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1592, - serialized_end=1620, + serialized_start=1655, + serialized_end=1688, ) @@ -1097,8 +1101,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1939, - serialized_end=1993, + serialized_start=2007, + serialized_end=2061, ) _HIVEJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1153,8 +1157,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _HIVEJOB = _descriptor.Descriptor( @@ -1289,8 +1293,8 @@ fields=[], ) ], - serialized_start=1623, - serialized_end=2055, + serialized_start=1691, + serialized_end=2123, ) @@ -1346,8 +1350,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1939, - serialized_end=1993, + serialized_start=2007, + serialized_end=2061, ) _SPARKSQLJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1402,8 +1406,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _SPARKSQLJOB = _descriptor.Descriptor( @@ -1538,8 +1542,8 @@ fields=[], ) ], - serialized_start=2058, - serialized_end=2543, + serialized_start=2126, + serialized_end=2611, ) @@ -1595,8 +1599,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1939, - serialized_end=1993, + serialized_start=2007, + serialized_end=2061, ) _PIGJOB_PROPERTIESENTRY = _descriptor.Descriptor( @@ -1651,8 +1655,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _PIGJOB = _descriptor.Descriptor( @@ -1805,8 +1809,8 @@ fields=[], ) ], - serialized_start=2546, - serialized_end=3045, + serialized_start=2614, + serialized_end=3113, ) @@ 
-1862,8 +1866,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=832, - serialized_end=881, + serialized_start=890, + serialized_end=939, ) _SPARKRJOB = _descriptor.Descriptor( @@ -1888,7 +1892,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1990,8 +1994,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3048, - serialized_end=3338, + serialized_start=3116, + serialized_end=3411, ) @@ -2017,7 +2021,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2047,8 +2051,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3340, - serialized_end=3398, + serialized_start=3413, + serialized_end=3476, ) @@ -2140,8 +2144,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3401, - serialized_end=3861, + serialized_start=3479, + serialized_end=3939, ) @@ -2167,7 +2171,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2197,8 +2201,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3863, - serialized_end=3913, + serialized_start=3941, + serialized_end=3996, ) @@ -2224,7 +2228,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2242,7 +2246,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2260,7 +2264,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2278,7 +2282,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2290,8 +2294,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3916, - serialized_end=4194, + serialized_start=3999, + serialized_end=4297, ) @@ -2347,8 +2351,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5215, - serialized_end=5260, + serialized_start=5323, + serialized_end=5368, ) _JOB = _descriptor.Descriptor( @@ -2391,7 +2395,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2699,8 +2703,8 @@ fields=[], ) ], - serialized_start=4197, - serialized_end=5272, + serialized_start=4300, + serialized_end=5380, ) @@ -2738,8 +2742,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5274, - serialized_end=5320, + serialized_start=5382, + serialized_end=5428, ) @@ -2765,7 +2769,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2783,7 +2787,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2801,7 +2805,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2831,8 +2835,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5322, - serialized_end=5445, + serialized_start=5431, + serialized_end=5569, ) @@ -2858,7 +2862,7 @@ containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2876,7 +2880,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2894,7 +2898,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2906,8 +2910,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5447, - serialized_end=5514, + serialized_start=5571, + serialized_end=5653, ) @@ -2933,7 +2937,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2951,7 +2955,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3053,8 +3057,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5517, - serialized_end=5794, + serialized_start=5656, + serialized_end=5943, ) @@ -3080,7 +3084,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3098,7 +3102,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3116,7 +3120,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3134,7 +3138,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -3152,7 +3156,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3164,8 +3168,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5797, - serialized_end=5965, + serialized_start=5946, + serialized_end=6139, ) @@ -3221,8 +3225,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5967, - serialized_end=6060, + serialized_start=6141, + serialized_end=6234, ) @@ -3248,7 +3252,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3266,7 +3270,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3284,7 +3288,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3296,8 +3300,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6062, - serialized_end=6132, + serialized_start=6236, + serialized_end=6321, ) @@ -3323,7 +3327,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3341,7 +3345,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -3359,7 +3363,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -3371,8 +3375,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6134, - serialized_end=6204, + 
serialized_start=6323, + serialized_end=6408, ) _LOGGINGCONFIG_DRIVERLOGLEVELSENTRY.fields_by_name[ @@ -3655,17 +3659,15 @@ DESCRIPTOR=_SPARKJOB, __module__="google.cloud.dataproc_v1beta2.proto.jobs_pb2", __doc__="""A Cloud Dataproc job for running `Apache - Spark `__ applications on YARN. + Spark `__ applications on YARN. The + specification of the main method to call to drive the job. Specify + either the jar file that contains the main class or the main class name. + To pass both a main jar and a main class in that jar, add the jar to + ``CommonJob.jar_file_uris``, and then specify the main class name in + ``main_class``. Attributes: - driver: - Required. The specification of the main method to call to - drive the job. Specify either the jar file that contains the - main class or the main class name. To pass both a main jar and - a main class in that jar, add the jar to - ``CommonJob.jar_file_uris``, and then specify the main class - name in ``main_class``. main_jar_file_uri: The HCFS URI of the jar file that contains the main class. main_class: @@ -4104,14 +4106,14 @@ Attributes: name: - Required. The application name. + Output only. The application name. state: - Required. The application state. + Output only. The application state. progress: - Required. The numerical progress of the application, from 1 to - 100. + Output only. The numerical progress of the application, from 1 + to 100. tracking_url: - Optional. The HTTP URL of the ApplicationMaster, + Optional. Output only. The HTTP URL of the ApplicationMaster, HistoryServer, or TimelineServer that provides application- specific information. The URL uses the internal hostname, and requires a proxy server for resolution and, possibly, access. 
@@ -4443,6 +4445,8 @@ _HADOOPJOB_PROPERTIESENTRY._options = None _SPARKJOB_PROPERTIESENTRY._options = None _PYSPARKJOB_PROPERTIESENTRY._options = None +_PYSPARKJOB.fields_by_name["main_python_file_uri"]._options = None +_QUERYLIST.fields_by_name["queries"]._options = None _HIVEJOB_SCRIPTVARIABLESENTRY._options = None _HIVEJOB_PROPERTIESENTRY._options = None _SPARKSQLJOB_SCRIPTVARIABLESENTRY._options = None @@ -4450,16 +4454,45 @@ _PIGJOB_SCRIPTVARIABLESENTRY._options = None _PIGJOB_PROPERTIESENTRY._options = None _SPARKRJOB_PROPERTIESENTRY._options = None +_SPARKRJOB.fields_by_name["main_r_file_uri"]._options = None +_JOBPLACEMENT.fields_by_name["cluster_name"]._options = None +_JOBREFERENCE.fields_by_name["project_id"]._options = None +_YARNAPPLICATION.fields_by_name["name"]._options = None +_YARNAPPLICATION.fields_by_name["state"]._options = None +_YARNAPPLICATION.fields_by_name["progress"]._options = None +_YARNAPPLICATION.fields_by_name["tracking_url"]._options = None _JOB_LABELSENTRY._options = None +_JOB.fields_by_name["placement"]._options = None +_SUBMITJOBREQUEST.fields_by_name["project_id"]._options = None +_SUBMITJOBREQUEST.fields_by_name["region"]._options = None +_SUBMITJOBREQUEST.fields_by_name["job"]._options = None +_GETJOBREQUEST.fields_by_name["project_id"]._options = None +_GETJOBREQUEST.fields_by_name["region"]._options = None +_GETJOBREQUEST.fields_by_name["job_id"]._options = None +_LISTJOBSREQUEST.fields_by_name["project_id"]._options = None +_LISTJOBSREQUEST.fields_by_name["region"]._options = None +_UPDATEJOBREQUEST.fields_by_name["project_id"]._options = None +_UPDATEJOBREQUEST.fields_by_name["region"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job_id"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None +_CANCELJOBREQUEST.fields_by_name["project_id"]._options = None +_CANCELJOBREQUEST.fields_by_name["region"]._options = None 
+_CANCELJOBREQUEST.fields_by_name["job_id"]._options = None +_DELETEJOBREQUEST.fields_by_name["project_id"]._options = None +_DELETEJOBREQUEST.fields_by_name["region"]._options = None +_DELETEJOBREQUEST.fields_by_name["job_id"]._options = None _JOBCONTROLLER = _descriptor.ServiceDescriptor( name="JobController", full_name="google.cloud.dataproc.v1beta2.JobController", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=6207, - serialized_end=7242, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=6411, + serialized_end=7686, methods=[ _descriptor.MethodDescriptor( name="SubmitJob", @@ -4469,7 +4502,7 @@ input_type=_SUBMITJOBREQUEST, output_type=_JOB, serialized_options=_b( - '\202\323\344\223\002@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\001*' + '\202\323\344\223\002@";/v1beta2/projects/{project_id}/regions/{region}/jobs:submit:\001*\332A\027project_id, region, job' ), ), _descriptor.MethodDescriptor( @@ -4480,7 +4513,7 @@ input_type=_GETJOBREQUEST, output_type=_JOB, serialized_options=_b( - "\202\323\344\223\002?\022=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" + "\202\323\344\223\002?\022=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\032project_id, region, job_id" ), ), _descriptor.MethodDescriptor( @@ -4491,7 +4524,7 @@ input_type=_LISTJOBSREQUEST, output_type=_LISTJOBSRESPONSE, serialized_options=_b( - "\202\323\344\223\0026\0224/v1beta2/projects/{project_id}/regions/{region}/jobs" + "\202\323\344\223\0026\0224/v1beta2/projects/{project_id}/regions/{region}/jobs\332A\022project_id, region\332A\032project_id, region, filter" ), ), _descriptor.MethodDescriptor( @@ -4513,7 +4546,7 @@ input_type=_CANCELJOBREQUEST, output_type=_JOB, serialized_options=_b( - '\202\323\344\223\002I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*' + 
'\202\323\344\223\002I"D/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel:\001*\332A\032project_id, region, job_id' ), ), _descriptor.MethodDescriptor( @@ -4524,7 +4557,7 @@ input_type=_DELETEJOBREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}" + "\202\323\344\223\002?*=/v1beta2/projects/{project_id}/regions/{region}/jobs/{job_id}\332A\032project_id, region, job_id" ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto index 8f9252a46716..74cbde3cac69 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/operations.proto @@ -17,8 +17,8 @@ syntax = "proto3"; package google.cloud.dataproc.v1beta2; -import "google/api/annotations.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc"; option java_multiple_files = true; diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py index 6c260f017d19..8a1d63b987b2 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/operations_pb2.py @@ -15,8 +15,8 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -27,11 +27,11 @@ "\n!com.google.cloud.dataproc.v1beta2B\017OperationsProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - 
'\n4google/cloud/dataproc_v1beta2/proto/operations.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xfa\x01\n\x16\x43lusterOperationStatus\x12J\n\x05state\x18\x01 \x01(\x0e\x32;.google.cloud.dataproc.v1beta2.ClusterOperationStatus.State\x12\x13\n\x0binner_state\x18\x02 \x01(\t\x12\x0f\n\x07\x64\x65tails\x18\x03 \x01(\t\x12\x34\n\x10state_start_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\x9f\x03\n\x18\x43lusterOperationMetadata\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x08 \x01(\t\x12\x45\n\x06status\x18\t \x01(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12M\n\x0estatus_history\x18\n \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12\x16\n\x0eoperation_type\x18\x0b \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x0c \x01(\t\x12S\n\x06labels\x18\r \x03(\x0b\x32\x43.google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry\x12\x10\n\x08warnings\x18\x0e \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42}\n!com.google.cloud.dataproc.v1beta2B\x0fOperationsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n4google/cloud/dataproc_v1beta2/proto/operations.proto\x12\x1dgoogle.cloud.dataproc.v1beta2\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto"\xfa\x01\n\x16\x43lusterOperationStatus\x12J\n\x05state\x18\x01 \x01(\x0e\x32;.google.cloud.dataproc.v1beta2.ClusterOperationStatus.State\x12\x13\n\x0binner_state\x18\x02 \x01(\t\x12\x0f\n\x07\x64\x65tails\x18\x03 \x01(\t\x12\x34\n\x10state_start_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"\x9f\x03\n\x18\x43lusterOperationMetadata\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12\x14\n\x0c\x63luster_uuid\x18\x08 \x01(\t\x12\x45\n\x06status\x18\t \x01(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12M\n\x0estatus_history\x18\n \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.ClusterOperationStatus\x12\x16\n\x0eoperation_type\x18\x0b \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x0c \x01(\t\x12S\n\x06labels\x18\r \x03(\x0b\x32\x43.google.cloud.dataproc.v1beta2.ClusterOperationMetadata.LabelsEntry\x12\x10\n\x08warnings\x18\x0e \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42}\n!com.google.cloud.dataproc.v1beta2B\x0fOperationsProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto index edaf357cc6c5..6fd30974176c 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates.proto @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.dataproc.v1beta2; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/dataproc/v1beta2/clusters.proto"; import "google/cloud/dataproc/v1beta2/jobs.proto"; import "google/longrunning/operations.proto"; @@ -32,6 +35,9 @@ option java_package = "com.google.cloud.dataproc.v1beta2"; // The API interface for 
managing Workflow Templates in the // Cloud Dataproc API. service WorkflowTemplateService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Creates new workflow template. rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { option (google.api.http) = { @@ -42,6 +48,7 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "parent, template"; } // Retrieves the latest workflow template. @@ -55,6 +62,7 @@ service WorkflowTemplateService { get: "/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" } }; + option (google.api.method_signature) = "name"; } // Instantiates a template and begins execution. @@ -70,7 +78,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). + // Also see [Using + // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -84,6 +94,12 @@ service WorkflowTemplateService { body: "*" } }; + option (google.api.method_signature) = "name"; + option (google.api.method_signature) = "name, parameters"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; } // Instantiates a template and begins execution. @@ -103,7 +119,9 @@ service WorkflowTemplateService { // clusters to be deleted. // // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. 
+ // [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // Also see [Using + // WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). // // On successful completion, // [Operation.response][google.longrunning.Operation.response] will be @@ -117,6 +135,11 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "parent, template"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; } // Updates (replaces) workflow template. The updated template @@ -130,6 +153,7 @@ service WorkflowTemplateService { body: "template" } }; + option (google.api.method_signature) = "template"; } // Lists workflows that match the specified filter in the request. @@ -140,6 +164,7 @@ service WorkflowTemplateService { get: "/v1beta2/{parent=projects/*/locations/*}/workflowTemplates" } }; + option (google.api.method_signature) = "parent"; } // Deletes a workflow template. It does not cancel in-progress workflows. @@ -150,11 +175,19 @@ service WorkflowTemplateService { delete: "/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" } }; + option (google.api.method_signature) = "name"; } } // A Cloud Dataproc workflow template resource. message WorkflowTemplate { + option (google.api.resource) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + pattern: "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}" + pattern: "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}" + history: ORIGINALLY_SINGLE_PATTERN + }; + // Required. The template id. // // The id must contain only letters (a-z, A-Z), numbers (0-9), @@ -162,12 +195,19 @@ message WorkflowTemplate { // or hyphen. Must consist of between 3 and 50 characters. // // . - string id = 2; + string id = 2 [(google.api.field_behavior) = REQUIRED]; - // Output only. 
The "resource name" of the template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Output only. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. Used to perform a consistent read-modify-write. // @@ -178,13 +218,13 @@ message WorkflowTemplate { // the current template with the `version` field filled in with the // current server version. The user updates other fields in the template, // then returns it as part of the `UpdateWorkflowTemplate` request. - int32 version = 3; + int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; // Output only. The time template was created. - google.protobuf.Timestamp create_time = 4; + google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The time template was last updated. - google.protobuf.Timestamp update_time = 5; + google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Optional. The labels to associate with this template. These labels // will be propagated to all jobs and clusters created by the workflow @@ -198,7 +238,7 @@ message WorkflowTemplate { // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). // // No more than 32 labels can be associated with a template. - map labels = 6; + map labels = 6 [(google.api.field_behavior) = OPTIONAL]; // Required. 
WorkflowTemplate scheduling information. WorkflowTemplatePlacement placement = 7; @@ -209,7 +249,7 @@ message WorkflowTemplate { // Optional. Template parameters whose values are substituted into the // template. Values for parameters must be provided when the template is // instantiated. - repeated TemplateParameter parameters = 9; + repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; } // Specifies workflow execution target. @@ -336,7 +376,7 @@ message OrderedJob { // - Main class (in HadoopJob and SparkJob) // - Zone (in ClusterSelector) message TemplateParameter { - // Required. Parameter name. + // Required. Parameter name. // The parameter name is used as the key, and paired with the // parameter value, which are passed to the template when the template // is instantiated. @@ -442,57 +482,66 @@ message WorkflowMetadata { DONE = 3; } - // Output only. The "resource name" of the template. - string template = 1; + // Output only. The resource name of the workflow template as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string template = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The version of template at the time of // workflow instantiation. - int32 version = 2; + int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The create cluster operation metadata. - ClusterOperation create_cluster = 3; + ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow graph. 
- WorkflowGraph graph = 4; + WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The delete cluster operation metadata. - ClusterOperation delete_cluster = 5; + ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The workflow state. - State state = 6; + State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The name of the target cluster. - string cluster_name = 7; + string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Map from parameter names to values that were used for those parameters. map parameters = 8; // Output only. Workflow start time. - google.protobuf.Timestamp start_time = 9; + google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Workflow end time. - google.protobuf.Timestamp end_time = 10; + google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The UUID of target cluster. - string cluster_uuid = 11; + string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The cluster operation triggered by a workflow. message ClusterOperation { // Output only. The id of the cluster operation. - string operation_id = 1; + string operation_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Error, if operation failed. - string error = 2; + string error = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Indicates the operation is done. - bool done = 3; + bool done = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The workflow graph. message WorkflowGraph { // Output only. The workflow nodes. - repeated WorkflowNode nodes = 1; + repeated WorkflowNode nodes = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; } // The workflow node. @@ -520,41 +569,65 @@ message WorkflowNode { } // Output only. The name of the node. 
- string step_id = 1; + string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Node's prerequisite nodes. - repeated string prerequisite_step_ids = 2; + repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The job id; populated after the node enters RUNNING state. - string job_id = 3; + string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The node state. - NodeState state = 5; + NodeState state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The error detail. - string error = 6; + string error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to create a workflow template. message CreateWorkflowTemplateRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,create`, the resource name of the + // region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.create`, the resource name of + // the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Required. The Dataproc workflow template to create. - WorkflowTemplate template = 2; + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; } // A request to fetch a workflow template. message GetWorkflowTemplateRequest { - // Required. 
The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to retrieve. Only previously - // instatiated versions can be retrieved. + // instantiated versions can be retrieved. // // If unspecified, retrieves the current version. int32 version = 2; @@ -562,10 +635,22 @@ message GetWorkflowTemplateRequest { // A request to instantiate a workflow template. message InstantiateWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to instantiate. If specified, // the workflow will be instantiated only if the current version of @@ -596,13 +681,25 @@ message InstantiateWorkflowTemplateRequest { // A request to instantiate an inline workflow template. message InstantiateInlineWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,instantiateinline`, the resource + // name of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.instantiateinline`, the + // resource name of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Required. The workflow template to instantiate. - WorkflowTemplate template = 2; + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; // Deprecated. 
Please use `request_id` field instead. string instance_id = 3; @@ -624,15 +721,27 @@ message UpdateWorkflowTemplateRequest { // Required. The updated workflow template. // // The `template.version` field must match the current version. - WorkflowTemplate template = 1; + WorkflowTemplate template = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; } // A request to list workflow templates in a project. message ListWorkflowTemplatesRequest { - // Required. The "resource name" of the region, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}` - string parent = 1; + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates,list`, the resource + // name of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.list`, the + // resource name of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The maximum number of results to return in each response. int32 page_size = 2; @@ -645,22 +754,34 @@ message ListWorkflowTemplatesRequest { // A response to a request to list workflow templates in a project. message ListWorkflowTemplatesResponse { // Output only. WorkflowTemplates list. - repeated WorkflowTemplate templates = 1; + repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. This token is included in the response if there are more // results to fetch. To fetch additional results, provide this value as the // page_token in a subsequent ListWorkflowTemplatesRequest. 
- string next_page_token = 2; + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A request to delete a workflow template. // // Currently started workflows will remain running. message DeleteWorkflowTemplateRequest { - // Required. The "resource name" of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names of the form - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - string name = 1; + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.delete`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; // Optional. The version of workflow template to delete. 
If specified, // will only delete the template if the current server version matches diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py index 850d445dff25..2bed13fb2178 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.dataproc_v1beta2.proto import ( clusters_pb2 as google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2, ) @@ -37,10 +40,13 @@ "\n!com.google.cloud.dataproc.v1beta2B\026WorkflowTemplatesProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc" ), serialized_pb=_b( - '\n\n\nhadoop_job\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x05 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x06 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12\x43\n\rspark_sql_job\x18\x07 \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry\x12@\n\nscheduling\x18\t \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x8e\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ParameterValidation"\xab\x01\n\x13ParameterValidation\x12?\n\x05regex\x18\x01 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.RegexValidationH\x00\x12@\n\x06values\x18\x02 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\x96\x05\n\x10WorkflowMetadata\x12\x10\n\x08template\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12G\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperation\x12;\n\x05graph\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.WorkflowGraph\x12G\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperation\x12\x44\n\x05state\x18\x06 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowMetadata.State\x12\x14\n\x0c\x63luster_name\x18\x07 \x01(\t\x12S\n\nparameters\x18\x08 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry\x12.\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0c\x63luster_uuid\x18\x0b \x01(\t\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"E\n\x10\x43lusterOperation\x12\x14\n\x0coperation_id\x18\x01 \x01(\t\x12\r\n\x05\x65rror\x18\x02 \x01(\t\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08"K\n\rWorkflowGraph\x12:\n\x05nodes\x18\x01 
\x03(\x0b\x32+.google.cloud.dataproc.v1beta2.WorkflowNode"\x90\x02\n\x0cWorkflowNode\x12\x0f\n\x07step_id\x18\x01 \x01(\t\x12\x1d\n\x15prerequisite_step_ids\x18\x02 \x03(\t\x12\x0e\n\x06job_id\x18\x03 \x01(\t\x12\x44\n\x05state\x18\x05 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowNode.NodeState\x12\r\n\x05\x65rror\x18\x06 \x01(\t"k\n\tNodeState\x12\x1b\n\x17NODE_STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"r\n\x1d\x43reateWorkflowTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplate";\n\x1aGetWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05"\x8a\x02\n"InstantiateWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x17\n\x0binstance_id\x18\x03 \x01(\tB\x02\x18\x01\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12\x65\n\nparameters\x18\x04 \x03(\x0b\x32Q.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa6\x01\n(InstantiateInlineWorkflowTemplateRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x41\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplate\x12\x13\n\x0binstance_id\x18\x03 \x01(\t\x12\x12\n\nrequest_id\x18\x04 \x01(\t"b\n\x1dUpdateWorkflowTemplateRequest\x12\x41\n\x08template\x18\x01 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplate"U\n\x1cListWorkflowTemplatesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"|\n\x1dListWorkflowTemplatesResponse\x12\x42\n\ttemplates\x18\x01 \x03(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplate\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t">\n\x1d\x44\x65leteWorkflowTemplateRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x05\x32\xdf\x0f\n\x17WorkflowTemplateService\x12\x9d\x02\n\x16\x43reateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\x93\x01\x82\xd3\xe4\x93\x02\x8c\x01"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\x08templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\x08template\x12\x81\x02\n\x13GetWorkflowTemplate\x12\x39.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"~\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\x12:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\x12\x9f\x02\n\x1bInstantiateWorkflowTemplate\x12\x41.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\x9d\x01\x82\xd3\xe4\x93\x02\x96\x01"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*\x12\xc5\x02\n!InstantiateInlineWorkflowTemplate\x12G.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xb7\x01\x82\xd3\xe4\x93\x02\xb0\x01"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\x12\xaf\x02\n\x16UpdateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xa5\x01\x82\xd3\xe4\x93\x02\x9e\x01\x1a\x41/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\x08templateZO\x1a\x43/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08template\x12\x92\x02\n\x15ListWorkflowTemplates\x12;.google.cloud.dataproc.v1beta2.ListWorkflowTe
mplatesRequest\x1a<.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse"~\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\x12:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\x12\xee\x01\n\x16\x44\x65leteWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest\x1a\x16.google.protobuf.Empty"~\x82\xd3\xe4\x93\x02x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}B\x84\x01\n!com.google.cloud.dataproc.v1beta2B\x16WorkflowTemplatesProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' + '\n\n\nhadoop_job\x18\x02 \x01(\x0b\x32(.google.cloud.dataproc.v1beta2.HadoopJobH\x00\x12<\n\tspark_job\x18\x03 \x01(\x0b\x32\'.google.cloud.dataproc.v1beta2.SparkJobH\x00\x12@\n\x0bpyspark_job\x18\x04 \x01(\x0b\x32).google.cloud.dataproc.v1beta2.PySparkJobH\x00\x12:\n\x08hive_job\x18\x05 \x01(\x0b\x32&.google.cloud.dataproc.v1beta2.HiveJobH\x00\x12\x38\n\x07pig_job\x18\x06 \x01(\x0b\x32%.google.cloud.dataproc.v1beta2.PigJobH\x00\x12\x43\n\rspark_sql_job\x18\x07 \x01(\x0b\x32*.google.cloud.dataproc.v1beta2.SparkSqlJobH\x00\x12\x45\n\x06labels\x18\x08 \x03(\x0b\x32\x35.google.cloud.dataproc.v1beta2.OrderedJob.LabelsEntry\x12@\n\nscheduling\x18\t \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.JobScheduling\x12\x1d\n\x15prerequisite_step_ids\x18\n \x03(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\n\n\x08job_type"\x8e\x01\n\x11TemplateParameter\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x46\n\nvalidation\x18\x04 \x01(\x0b\x32\x32.google.cloud.dataproc.v1beta2.ParameterValidation"\xab\x01\n\x13ParameterValidation\x12?\n\x05regex\x18\x01 \x01(\x0b\x32..google.cloud.dataproc.v1beta2.RegexValidationH\x00\x12@\n\x06values\x18\x02 
\x01(\x0b\x32..google.cloud.dataproc.v1beta2.ValueValidationH\x00\x42\x11\n\x0fvalidation_type""\n\x0fRegexValidation\x12\x0f\n\x07regexes\x18\x01 \x03(\t"!\n\x0fValueValidation\x12\x0e\n\x06values\x18\x01 \x03(\t"\xc8\x05\n\x10WorkflowMetadata\x12\x15\n\x08template\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x07version\x18\x02 \x01(\x05\x42\x03\xe0\x41\x03\x12L\n\x0e\x63reate_cluster\x18\x03 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12@\n\x05graph\x18\x04 \x01(\x0b\x32,.google.cloud.dataproc.v1beta2.WorkflowGraphB\x03\xe0\x41\x03\x12L\n\x0e\x64\x65lete_cluster\x18\x05 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.ClusterOperationB\x03\xe0\x41\x03\x12I\n\x05state\x18\x06 \x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowMetadata.StateB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_name\x18\x07 \x01(\tB\x03\xe0\x41\x03\x12S\n\nparameters\x18\x08 \x03(\x0b\x32?.google.cloud.dataproc.v1beta2.WorkflowMetadata.ParametersEntry\x12\x33\n\nstart_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x19\n\x0c\x63luster_uuid\x18\x0b \x01(\tB\x03\xe0\x41\x03\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"8\n\x05State\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03"T\n\x10\x43lusterOperation\x12\x19\n\x0coperation_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x02 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04\x64one\x18\x03 \x01(\x08\x42\x03\xe0\x41\x03"P\n\rWorkflowGraph\x12?\n\x05nodes\x18\x01 \x03(\x0b\x32+.google.cloud.dataproc.v1beta2.WorkflowNodeB\x03\xe0\x41\x03"\xa9\x02\n\x0cWorkflowNode\x12\x14\n\x07step_id\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12"\n\x15prerequisite_step_ids\x18\x02 \x03(\tB\x03\xe0\x41\x03\x12\x13\n\x06job_id\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12I\n\x05state\x18\x05 
\x01(\x0e\x32\x35.google.cloud.dataproc.v1beta2.WorkflowNode.NodeStateB\x03\xe0\x41\x03\x12\x12\n\x05\x65rror\x18\x06 \x01(\tB\x03\xe0\x41\x03"k\n\tNodeState\x12\x1b\n\x17NODE_STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x42LOCKED\x10\x01\x12\x0c\n\x08RUNNABLE\x10\x02\x12\x0b\n\x07RUNNING\x10\x03\x12\r\n\tCOMPLETED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05"\xa9\x01\n\x1d\x43reateWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02"m\n\x1aGetWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05"\xbc\x02\n"InstantiateWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x12\x17\n\x0binstance_id\x18\x03 \x01(\tB\x02\x18\x01\x12\x12\n\nrequest_id\x18\x05 \x01(\t\x12\x65\n\nparameters\x18\x04 \x03(\x0b\x32Q.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdd\x01\n(InstantiateInlineWorkflowTemplateRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\x12(dataproc.googleapis.com/WorkflowTemplate\x12\x46\n\x08template\x18\x02 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x02\x12\x13\n\x0binstance_id\x18\x03 \x01(\t\x12\x12\n\nrequest_id\x18\x04 \x01(\t"\x94\x01\n\x1dUpdateWorkflowTemplateRequest\x12s\n\x08template\x18\x01 \x01(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate"Z\n\x1cListWorkflowTemplatesRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 
\x01(\t"\x86\x01\n\x1dListWorkflowTemplatesResponse\x12G\n\ttemplates\x18\x01 \x03(\x0b\x32/.google.cloud.dataproc.v1beta2.WorkflowTemplateB\x03\xe0\x41\x03\x12\x1c\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x03"p\n\x1d\x44\x65leteWorkflowTemplateRequest\x12>\n\x04name\x18\x01 \x01(\tB0\xe0\x41\x02\xfa\x41*\n(dataproc.googleapis.com/WorkflowTemplate\x12\x0f\n\x07version\x18\x02 \x01(\x05\x32\xe9\x11\n\x17WorkflowTemplateService\x12\xb0\x02\n\x16\x43reateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.CreateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xa6\x01\x82\xd3\xe4\x93\x02\x8c\x01"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\x08templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\x08template\xda\x41\x10parent, template\x12\x89\x02\n\x13GetWorkflowTemplate\x12\x39.google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\x85\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\x12:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x12\xe5\x02\n\x1bInstantiateWorkflowTemplate\x12\x41.google.cloud.dataproc.v1beta2.InstantiateWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xe3\x01\x82\xd3\xe4\x93\x02\x96\x01"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\x01*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\x01*\xda\x41\x04name\xda\x41\x10name, parameters\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\x84\x03\n!InstantiateInlineWorkflowTemplate\x12G.google.cloud.dataproc.v1beta2.InstantiateInlineWorkflowTemplateRequest\x1a\x1d.google.longrunning.Operation"\xf6\x01\x82\xd3\xe4\x93\x02\xb0\x01"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\x08templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\x08template\xda\x41\x10parent, 
template\xca\x41)\n\x15google.protobuf.Empty\x12\x10WorkflowMetadata\x12\xba\x02\n\x16UpdateWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.UpdateWorkflowTemplateRequest\x1a/.google.cloud.dataproc.v1beta2.WorkflowTemplate"\xb0\x01\x82\xd3\xe4\x93\x02\x9e\x01\x1a\x41/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\x08templateZO\x1a\x43/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\x08template\xda\x41\x08template\x12\x9c\x02\n\x15ListWorkflowTemplates\x12;.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesRequest\x1a<.google.cloud.dataproc.v1beta2.ListWorkflowTemplatesResponse"\x87\x01\x82\xd3\xe4\x93\x02x\x12\x38/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\x12:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\xda\x41\x06parent\x12\xf6\x01\n\x16\x44\x65leteWorkflowTemplate\x12<.google.cloud.dataproc.v1beta2.DeleteWorkflowTemplateRequest\x1a\x16.google.protobuf.Empty"\x85\x01\x82\xd3\xe4\x93\x02x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\xda\x41\x04name\x1aK\xca\x41\x17\x64\x61taproc.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x84\x01\n!com.google.cloud.dataproc.v1beta2B\x16WorkflowTemplatesProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataprocb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_clusters__pb2.DESCRIPTOR, google_dot_cloud_dot_dataproc__v1beta2_dot_proto_dot_jobs__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, @@ -71,8 +77,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3046, - serialized_end=3102, + serialized_start=3421, + serialized_end=3477, ) 
_sym_db.RegisterEnumDescriptor(_WORKFLOWMETADATA_STATE) @@ -107,8 +113,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3418, - serialized_end=3525, + serialized_start=3838, + serialized_end=3945, ) _sym_db.RegisterEnumDescriptor(_WORKFLOWNODE_NODESTATE) @@ -165,8 +171,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=767, - serialized_end=812, + serialized_start=887, + serialized_end=932, ) _WORKFLOWTEMPLATE = _descriptor.Descriptor( @@ -191,7 +197,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -209,7 +215,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -227,7 +233,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -245,7 +251,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -263,7 +269,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -281,7 +287,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -335,20 +341,22 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_WORKFLOWTEMPLATE_LABELSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + 
"\352A\306\001\n(dataproc.googleapis.com/WorkflowTemplate\022Iprojects/{project}/regions/{region}/workflowTemplates/{workflow_template}\022Mprojects/{project}/locations/{location}/workflowTemplates/{workflow_template} \001" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=325, - serialized_end=812, + serialized_start=410, + serialized_end=1137, ) @@ -412,8 +420,8 @@ fields=[], ) ], - serialized_start=815, - serialized_end=1005, + serialized_start=1140, + serialized_end=1330, ) @@ -469,8 +477,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=767, - serialized_end=812, + serialized_start=887, + serialized_end=932, ) _MANAGEDCLUSTER = _descriptor.Descriptor( @@ -543,8 +551,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1008, - serialized_end=1230, + serialized_start=1333, + serialized_end=1555, ) @@ -600,8 +608,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1357, - serialized_end=1409, + serialized_start=1682, + serialized_end=1734, ) _CLUSTERSELECTOR = _descriptor.Descriptor( @@ -656,8 +664,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1233, - serialized_end=1409, + serialized_start=1558, + serialized_end=1734, ) @@ -713,8 +721,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=767, - serialized_end=812, + serialized_start=887, + serialized_end=932, ) _ORDEREDJOB = _descriptor.Descriptor( @@ -921,8 +929,8 @@ fields=[], ) ], - serialized_start=1412, - serialized_end=2047, + serialized_start=1737, + serialized_end=2372, ) @@ -1014,8 +1022,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2050, - serialized_end=2192, + serialized_start=2375, + serialized_end=2517, ) @@ -1079,8 +1087,8 @@ fields=[], ) ], - serialized_start=2195, - serialized_end=2366, + serialized_start=2520, + serialized_end=2691, ) @@ -1118,8 +1126,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=2368, - serialized_end=2402, + serialized_start=2693, + serialized_end=2727, ) @@ -1157,8 +1165,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2404, - serialized_end=2437, + serialized_start=2729, + serialized_end=2762, ) @@ -1214,8 +1222,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2995, - serialized_end=3044, + serialized_start=3370, + serialized_end=3419, ) _WORKFLOWMETADATA = _descriptor.Descriptor( @@ -1240,7 +1248,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1258,7 +1266,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1276,7 +1284,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1294,7 +1302,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1312,7 +1320,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1330,7 +1338,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1348,7 +1356,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1384,7 +1392,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1402,7 +1410,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1420,7 +1428,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1432,8 +1440,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2440, - serialized_end=3102, + serialized_start=2765, + serialized_end=3477, ) @@ -1459,7 +1467,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1477,7 +1485,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1495,7 +1503,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1507,8 +1515,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3104, - serialized_end=3173, + serialized_start=3479, + serialized_end=3563, ) @@ -1534,7 +1542,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ) ], @@ -1546,8 +1554,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3175, - serialized_end=3250, + serialized_start=3565, + serialized_end=3645, ) @@ -1573,7 +1581,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1591,7 +1599,7 @@ containing_type=None, is_extension=False, extension_scope=None, 
- serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1609,7 +1617,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1627,7 +1635,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1645,7 +1653,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -1657,8 +1665,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3253, - serialized_end=3525, + serialized_start=3648, + serialized_end=3945, ) @@ -1684,7 +1692,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\022(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1702,7 +1712,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1714,8 +1724,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3527, - serialized_end=3641, + serialized_start=3948, + serialized_end=4117, ) @@ -1741,7 +1751,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1771,8 +1783,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3643, - serialized_end=3702, + serialized_start=4119, + serialized_end=4228, ) @@ -1828,8 +1840,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2995, - serialized_end=3044, 
+ serialized_start=3370, + serialized_end=3419, ) _INSTANTIATEWORKFLOWTEMPLATEREQUEST = _descriptor.Descriptor( @@ -1854,7 +1866,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1938,8 +1952,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3705, - serialized_end=3971, + serialized_start=4231, + serialized_end=4547, ) @@ -1965,7 +1979,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\022(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1983,7 +1999,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2031,8 +2047,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3974, - serialized_end=4140, + serialized_start=4550, + serialized_end=4771, ) @@ -2058,7 +2074,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ) ], @@ -2070,8 +2088,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4142, - serialized_end=4240, + serialized_start=4774, + serialized_end=4922, ) @@ -2097,7 +2115,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2145,8 +2163,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4242, - serialized_end=4327, + serialized_start=4924, + serialized_end=5014, ) @@ -2172,7 +2190,7 @@ containing_type=None, is_extension=False, 
extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2190,7 +2208,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2202,8 +2220,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4329, - serialized_end=4453, + serialized_start=5017, + serialized_end=5151, ) @@ -2229,7 +2247,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A*\n(dataproc.googleapis.com/WorkflowTemplate" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2259,8 +2279,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4455, - serialized_end=4517, + serialized_start=5153, + serialized_end=5265, ) _WORKFLOWTEMPLATE_LABELSENTRY.containing_type = _WORKFLOWTEMPLATE @@ -2489,10 +2509,16 @@ Cannot begin or end with underscore or hyphen. Must consist of between 3 and 50 characters. . name: - Output only. The "resource name" of the template, as described - in https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}/workflowTemplate - s/{template_id}`` + Output only. The resource name of the workflow template, as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates``, the resource name of + the template has the following format: ``projects/{proje + ct_id}/regions/{region}/workflowTemplates/{template_id}`` - + For ``projects.locations.workflowTemplates``, the resource + name of the template has the following format: ``project + s/{project_id}/locations/{location}/workflowTemplates/{templat + e_id}`` version: Optional. Used to perform a consistent read-modify-write. This field should be left blank for a @@ -2839,7 +2865,16 @@ Attributes: template: - Output only. 
The "resource name" of the template. + Output only. The resource name of the workflow template as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates``, the resource name of + the template has the following format: ``projects/{proje + ct_id}/regions/{region}/workflowTemplates/{template_id}`` - + For ``projects.locations.workflowTemplates``, the resource + name of the template has the following format: ``project + s/{project_id}/locations/{location}/workflowTemplates/{templat + e_id}`` version: Output only. The version of template at the time of workflow instantiation. @@ -2947,9 +2982,15 @@ Attributes: parent: - Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + Required. The resource name of the region or location, as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates,create``, the resource + name of the region has the following format: + ``projects/{project_id}/regions/{region}`` - For + ``projects.locations.workflowTemplates.create``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template: Required. The Dataproc workflow template to create. """, @@ -2969,14 +3010,19 @@ Attributes: name: - Required. The "resource name" of the workflow template, as + Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}/workflowTemplate - s/{template_id}`` + https://cloud.google.com/apis/design/resource\_names. 
- For + ``projects.regions.workflowTemplates.get``, the resource name + of the template has the following format: ``projects/{pr + oject_id}/regions/{region}/workflowTemplates/{template_id}`` + - For ``projects.locations.workflowTemplates.get``, the + resource name of the template has the following format: + ``projects/{project_id}/locations/{location}/workflowTemplates + /{template_id}`` version: Optional. The version of workflow template to retrieve. Only - previously instatiated versions can be retrieved. If + previously instantiated versions can be retrieved. If unspecified, retrieves the current version. """, # @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1beta2.GetWorkflowTemplateRequest) @@ -3004,11 +3050,17 @@ Attributes: name: - Required. The "resource name" of the workflow template, as + Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}/workflowTemplate - s/{template_id}`` + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates.instantiate``, the + resource name of the template has the following format: + ``projects/{project_id}/regions/{region}/workflowTemplates/{te + mplate_id}`` - For + ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: ``p + rojects/{project_id}/locations/{location}/workflowTemplates/{t + emplate_id}`` version: Optional. The version of workflow template to instantiate. If specified, the workflow will be instantiated only if the @@ -3048,10 +3100,15 @@ Attributes: parent: - Required. The "resource name" of the workflow template region, - as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + Required. 
The resource name of the region or location, as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates,instantiateinline``, the + resource name of the region has the following format: + ``projects/{project_id}/regions/{region}`` - For + ``projects.locations.workflowTemplates.instantiateinline``, + the resource name of the location has the following format: + ``projects/{project_id}/locations/{location}`` template: Required. The workflow template to instantiate. instance_id: @@ -3101,9 +3158,15 @@ Attributes: parent: - Required. The "resource name" of the region, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}`` + Required. The resource name of the region or location, as + described in + https://cloud.google.com/apis/design/resource\_names. - For + ``projects.regions.workflowTemplates,list``, the resource name + of the region has the following format: + ``projects/{project_id}/regions/{region}`` - For + ``projects.locations.workflowTemplates.list``, the resource + name of the location has the following format: + ``projects/{project_id}/locations/{location}`` page_size: Optional. The maximum number of results to return in each response. @@ -3152,11 +3215,17 @@ Attributes: name: - Required. The "resource name" of the workflow template, as + Required. The resource name of the workflow template, as described in - https://cloud.google.com/apis/design/resource\_names of the - form ``projects/{project_id}/regions/{region}/workflowTemplate - s/{template_id}`` + https://cloud.google.com/apis/design/resource\_names. 
- For + ``projects.regions.workflowTemplates.delete``, the resource + name of the template has the following format: ``project + s/{project_id}/regions/{region}/workflowTemplates/{template_id + }`` - For + ``projects.locations.workflowTemplates.instantiate``, the + resource name of the template has the following format: ``p + rojects/{project_id}/locations/{location}/workflowTemplates/{t + emplate_id}`` version: Optional. The version of workflow template to delete. If specified, will only delete the template if the current server @@ -3170,21 +3239,61 @@ DESCRIPTOR._options = None _WORKFLOWTEMPLATE_LABELSENTRY._options = None +_WORKFLOWTEMPLATE.fields_by_name["id"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["name"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["version"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["create_time"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["update_time"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["labels"]._options = None +_WORKFLOWTEMPLATE.fields_by_name["parameters"]._options = None +_WORKFLOWTEMPLATE._options = None _MANAGEDCLUSTER_LABELSENTRY._options = None _CLUSTERSELECTOR_CLUSTERLABELSENTRY._options = None _ORDEREDJOB_LABELSENTRY._options = None _WORKFLOWMETADATA_PARAMETERSENTRY._options = None +_WORKFLOWMETADATA.fields_by_name["template"]._options = None +_WORKFLOWMETADATA.fields_by_name["version"]._options = None +_WORKFLOWMETADATA.fields_by_name["create_cluster"]._options = None +_WORKFLOWMETADATA.fields_by_name["graph"]._options = None +_WORKFLOWMETADATA.fields_by_name["delete_cluster"]._options = None +_WORKFLOWMETADATA.fields_by_name["state"]._options = None +_WORKFLOWMETADATA.fields_by_name["cluster_name"]._options = None +_WORKFLOWMETADATA.fields_by_name["start_time"]._options = None +_WORKFLOWMETADATA.fields_by_name["end_time"]._options = None +_WORKFLOWMETADATA.fields_by_name["cluster_uuid"]._options = None +_CLUSTEROPERATION.fields_by_name["operation_id"]._options = None 
+_CLUSTEROPERATION.fields_by_name["error"]._options = None +_CLUSTEROPERATION.fields_by_name["done"]._options = None +_WORKFLOWGRAPH.fields_by_name["nodes"]._options = None +_WORKFLOWNODE.fields_by_name["step_id"]._options = None +_WORKFLOWNODE.fields_by_name["prerequisite_step_ids"]._options = None +_WORKFLOWNODE.fields_by_name["job_id"]._options = None +_WORKFLOWNODE.fields_by_name["state"]._options = None +_WORKFLOWNODE.fields_by_name["error"]._options = None +_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name["parent"]._options = None +_CREATEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None +_GETWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None _INSTANTIATEWORKFLOWTEMPLATEREQUEST_PARAMETERSENTRY._options = None +_INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None _INSTANTIATEWORKFLOWTEMPLATEREQUEST.fields_by_name["instance_id"]._options = None +_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["parent"]._options = None +_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None +_UPDATEWORKFLOWTEMPLATEREQUEST.fields_by_name["template"]._options = None +_LISTWORKFLOWTEMPLATESREQUEST.fields_by_name["parent"]._options = None +_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name["templates"]._options = None +_LISTWORKFLOWTEMPLATESRESPONSE.fields_by_name["next_page_token"]._options = None +_DELETEWORKFLOWTEMPLATEREQUEST.fields_by_name["name"]._options = None _WORKFLOWTEMPLATESERVICE = _descriptor.ServiceDescriptor( name="WorkflowTemplateService", full_name="google.cloud.dataproc.v1beta2.WorkflowTemplateService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=4520, - serialized_end=6535, + serialized_options=_b( + "\312A\027dataproc.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=5268, + serialized_end=7549, methods=[ _descriptor.MethodDescriptor( name="CreateWorkflowTemplate", @@ -3194,7 +3303,7 @@ 
input_type=_CREATEWORKFLOWTEMPLATEREQUEST, output_type=_WORKFLOWTEMPLATE, serialized_options=_b( - '\202\323\344\223\002\214\001"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\010templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\010template' + '\202\323\344\223\002\214\001"8/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:\010templateZF":/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:\010template\332A\020parent, template' ), ), _descriptor.MethodDescriptor( @@ -3205,7 +3314,7 @@ input_type=_GETWORKFLOWTEMPLATEREQUEST, output_type=_WORKFLOWTEMPLATE, serialized_options=_b( - "\202\323\344\223\002x\0228/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\022:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" + "\202\323\344\223\002x\0228/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<\022:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3216,7 +3325,7 @@ input_type=_INSTANTIATEWORKFLOWTEMPLATEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002\226\001"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\001*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\001*' + '\202\323\344\223\002\226\001"D/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}:instantiate:\001*ZK"F/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}:instantiate:\001*\332A\004name\332A\020name, parameters\312A)\n\025google.protobuf.Empty\022\020WorkflowMetadata' ), ), _descriptor.MethodDescriptor( @@ -3227,7 +3336,7 @@ input_type=_INSTANTIATEINLINEWORKFLOWTEMPLATEREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - 
'\202\323\344\223\002\260\001"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\010templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\010template' + '\202\323\344\223\002\260\001"L/v1beta2/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline:\010templateZV"J/v1beta2/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline:\010template\332A\020parent, template\312A)\n\025google.protobuf.Empty\022\020WorkflowMetadata' ), ), _descriptor.MethodDescriptor( @@ -3238,7 +3347,7 @@ input_type=_UPDATEWORKFLOWTEMPLATEREQUEST, output_type=_WORKFLOWTEMPLATE, serialized_options=_b( - "\202\323\344\223\002\236\001\032A/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\010templateZO\032C/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\010template" + "\202\323\344\223\002\236\001\032A/v1beta2/{template.name=projects/*/regions/*/workflowTemplates/*}:\010templateZO\032C/v1beta2/{template.name=projects/*/locations/*/workflowTemplates/*}:\010template\332A\010template" ), ), _descriptor.MethodDescriptor( @@ -3249,7 +3358,7 @@ input_type=_LISTWORKFLOWTEMPLATESREQUEST, output_type=_LISTWORKFLOWTEMPLATESRESPONSE, serialized_options=_b( - "\202\323\344\223\002x\0228/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\022:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates" + "\202\323\344\223\002x\0228/v1beta2/{parent=projects/*/regions/*}/workflowTemplatesZ<\022:/v1beta2/{parent=projects/*/locations/*}/workflowTemplates\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -3260,7 +3369,7 @@ input_type=_DELETEWORKFLOWTEMPLATEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}" + 
"\202\323\344\223\002x*8/v1beta2/{name=projects/*/regions/*/workflowTemplates/*}Z<*:/v1beta2/{name=projects/*/locations/*/workflowTemplates/*}\332A\004name" ), ), ], diff --git a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py index 012152a68d1a..e05372f50416 100644 --- a/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py +++ b/dataproc/google/cloud/dataproc_v1beta2/proto/workflow_templates_pb2_grpc.py @@ -94,7 +94,9 @@ def InstantiateWorkflowTemplate(self, request, context): clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1beta2#workflowmetadata). + Also see [Using + WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). On successful completion, [Operation.response][google.longrunning.Operation.response] will be @@ -122,7 +124,9 @@ def InstantiateInlineWorkflowTemplate(self, request, context): clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be - [WorkflowMetadata][google.cloud.dataproc.v1beta2.WorkflowMetadata]. + [WorkflowMetadata](/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + Also see [Using + WorkflowMetadata](/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). 
On successful completion, [Operation.response][google.longrunning.Operation.response] will be diff --git a/dataproc/synth.metadata b/dataproc/synth.metadata index 99c6b1a0eb60..dfd138d546ab 100644 --- a/dataproc/synth.metadata +++ b/dataproc/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:21:27.830464Z", + "updateTime": "2019-10-15T12:20:15.152154Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.39.0", + "dockerImage": "googleapis/artman@sha256:72554d0b3bdc0b4ac7d6726a6a606c00c14b454339037ed86be94574fb05d9f3" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "82e14b22669d5748d7a0922634159794ce0bf796", + "internalRef": "274692507" } }, { diff --git a/datastore/CHANGELOG.md b/datastore/CHANGELOG.md index 621e31cd680b..2dc7b6c7cc86 100644 --- a/datastore/CHANGELOG.md +++ b/datastore/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## 1.10.0 + +10-10-2019 12:20 PDT + + +### Implementation Changes +- Remove send / receive message size limit (via synth). ([#8952](https://github.com/googleapis/google-cloud-python/pull/8952)) + +### New Features +- Add `client_options` to constructors for manual clients. ([#9055](https://github.com/googleapis/google-cloud-python/pull/9055)) + +### Dependencies +- Pin `google-cloud-core >= 1.0.3, < 2.0.0dev`. ([#9055](https://github.com/googleapis/google-cloud-python/pull/9055)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. 
([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update docs for building datastore indexes. ([#8707](https://github.com/googleapis/google-cloud-python/pull/8707)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 1.9.0 07-24-2019 16:04 PDT diff --git a/datastore/README.rst b/datastore/README.rst index e613092a58bf..bb685f04f9c9 100644 --- a/datastore/README.rst +++ b/datastore/README.rst @@ -105,7 +105,7 @@ Example Usage Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for Google Cloud Datastore API +- Read the `Client Library Documentation`_ for Google Cloud Datastore API to see other available methods on the client. - Read the `Product documentation`_ to learn more about the product and see How-to Guides. diff --git a/datastore/setup.py b/datastore/setup.py index 06c15b0b8434..4fadd33db7ce 100644 --- a/datastore/setup.py +++ b/datastore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-datastore" description = "Google Cloud Datastore API client library" -version = "1.9.0" +version = "1.10.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/dlp/docs/conf.py b/dlp/docs/conf.py index c22d92e47568..2d33b89f6062 100644 --- a/dlp/docs/conf.py +++ b/dlp/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/dlp/noxfile.py b/dlp/noxfile.py index f52153dd2dc2..45ecc8fff050 100644 --- a/dlp/noxfile.py +++ b/dlp/noxfile.py @@ -118,6 +118,7 @@ def 
system(session): session.install("-e", "../test_utils/") session.install("-e", ".") + env = {} # Additional setup for VPCSC system tests if os.environ.get("GOOGLE_CLOUD_TESTS_IN_VPCSC", "false").lower() != "true": # Unset PROJECT_ID, since VPCSC system tests expect this to be a project diff --git a/dlp/synth.metadata b/dlp/synth.metadata index fbdfcc489bd7..aa33a4001070 100644 --- a/dlp/synth.metadata +++ b/dlp/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-24T18:27:57.363448Z", + "updateTime": "2019-10-05T12:21:54.232546Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.0", - "dockerImage": "googleapis/artman@sha256:0f66008f69061ea6d41499e2a34da3fc64fc7c9798077e3a37158653a135d801" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c277413f1840b735582235ece0a7f2825520b7a5", - "internalRef": "270943570" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/dlp/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py b/dlp/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py index 2dce7a68e0d0..f075044d00e1 100644 --- a/dlp/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py +++ b/dlp/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py @@ -96,7 +96,7 @@ def test_deidentify_content(self): name_inside = client.project_path(PROJECT_INSIDE) delayed_inside = lambda: client.deidentify_content(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) - delayed_outside = lambda: client.deidentify_conent(name_outside) + delayed_outside = lambda: client.deidentify_content(name_outside) TestSystemDlpService._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( diff --git a/dns/CHANGELOG.md b/dns/CHANGELOG.md index 19e7b74c322a..86088ee8e792 100644 --- a/dns/CHANGELOG.md +++ 
b/dns/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-dns/#history +## 0.31.0 + +10-15-2019 06:42 PDT + + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Fix intersphinx reference to `requests`. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix broken links in docs. ([#9148](https://github.com/googleapis/google-cloud-python/pull/9148)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + ## 0.30.2 07-11-2019 10:09 PDT diff --git a/dns/google/cloud/dns/_http.py b/dns/google/cloud/dns/_http.py index 510681261bee..51f3f5634af7 100644 --- a/dns/google/cloud/dns/_http.py +++ b/dns/google/cloud/dns/_http.py @@ -29,15 +29,14 @@ class Connection(_http.JSONConnection): :param client_info: (Optional) instance used to generate user agent. 
""" - def __init__(self, client, client_info=None): - super(Connection, self).__init__(client, client_info) + DEFAULT_API_ENDPOINT = "https://dns.googleapis.com" + def __init__(self, client, client_info=None, api_endpoint=DEFAULT_API_ENDPOINT): + super(Connection, self).__init__(client, client_info) + self.API_BASE_URL = api_endpoint self._client_info.gapic_version = __version__ self._client_info.client_library_version = __version__ - API_BASE_URL = "https://dns.googleapis.com" - """The base of the API call URL.""" - API_VERSION = "v1" """The version of the API, used in building the API call's URL.""" diff --git a/dns/google/cloud/dns/client.py b/dns/google/cloud/dns/client.py index 4bfa112d5226..f1817a3cac2c 100644 --- a/dns/google/cloud/dns/client.py +++ b/dns/google/cloud/dns/client.py @@ -15,6 +15,7 @@ """Client for interacting with the Google Cloud DNS API.""" from google.api_core import page_iterator +from google.api_core import client_options as client_options_mod from google.cloud.client import ClientWithProject from google.cloud.dns._http import Connection @@ -50,16 +51,37 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. 
""" SCOPE = ("https://www.googleapis.com/auth/ndev.clouddns.readwrite",) """The scopes required for authenticating as a Cloud DNS consumer.""" - def __init__(self, project=None, credentials=None, _http=None, client_info=None): + def __init__( + self, + project=None, + credentials=None, + _http=None, + client_info=None, + client_options=None, + ): super(Client, self).__init__( project=project, credentials=credentials, _http=_http ) - self._connection = Connection(self, client_info=client_info) + + kwargs = {"client_info": client_info} + if client_options: + if isinstance(client_options, dict): + client_options = client_options_mod.from_dict(client_options) + + if client_options.api_endpoint: + kwargs["api_endpoint"] = client_options.api_endpoint + + self._connection = Connection(self, **kwargs) def quotas(self): """Return DNS quotas for the project associated with this client. diff --git a/dns/setup.py b/dns/setup.py index 3e9da9607d04..29b45df05e83 100644 --- a/dns/setup.py +++ b/dns/setup.py @@ -22,14 +22,14 @@ name = 'google-cloud-dns' description = 'Google Cloud DNS API client library' -version = '0.30.2' +version = '0.31.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 3 - Alpha' dependencies = [ - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } diff --git a/dns/tests/unit/test__http.py b/dns/tests/unit/test__http.py index 9c5198f0a7d2..d1b656c101a7 100644 --- a/dns/tests/unit/test__http.py +++ b/dns/tests/unit/test__http.py @@ -44,6 +44,12 @@ def test_build_api_url_w_extra_query_params(self): parms = dict(parse_qsl(qs)) self.assertEqual(parms["bar"], "baz") + def test_build_api_url_w_custom_endpoint(self): + custom_endpoint = "https://foo-dns.googleapis.com" + conn = self._make_one(object(), api_endpoint=custom_endpoint) + URI = "/".join([custom_endpoint, "dns", conn.API_VERSION, 
"foo"]) + self.assertEqual(conn.build_api_url("/foo"), URI) + def test_extra_headers(self): import requests from google.cloud import _http as base_http diff --git a/dns/tests/unit/test_client.py b/dns/tests/unit/test_client.py index 5ca6eec98a1f..2d1e274c98d9 100644 --- a/dns/tests/unit/test_client.py +++ b/dns/tests/unit/test_client.py @@ -37,7 +37,7 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_ctor(self): + def test_ctor_defaults(self): from google.api_core.client_info import ClientInfo from google.cloud.dns._http import Connection @@ -48,6 +48,9 @@ def test_ctor(self): self.assertIs(client._connection.credentials, creds) self.assertIs(client._connection.http, http) self.assertIsInstance(client._connection._client_info, ClientInfo) + self.assertEqual( + client._connection.API_BASE_URL, client._connection.DEFAULT_API_ENDPOINT + ) def test_ctor_w_client_info(self): from google.api_core.client_info import ClientInfo @@ -65,6 +68,55 @@ def test_ctor_w_client_info(self): self.assertIs(client._connection.http, http) self.assertIs(client._connection._client_info, client_info) + def test_ctor_w_empty_client_options_object(self): + from google.api_core.client_info import ClientInfo + from google.api_core.client_options import ClientOptions + from google.cloud.dns._http import Connection + + creds = _make_credentials() + http = object() + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + client_options=ClientOptions(), + ) + self.assertIsInstance(client._connection, Connection) + self.assertIs(client._connection.credentials, creds) + self.assertIs(client._connection.http, http) + self.assertIsInstance(client._connection._client_info, ClientInfo) + self.assertEqual( + client._connection.API_BASE_URL, client._connection.DEFAULT_API_ENDPOINT + ) + + def test_ctor_w_client_options_object(self): + from google.api_core.client_options import ClientOptions + + api_endpoint 
= "https://foo-dns.googleapis.com" + creds = _make_credentials() + http = object() + client_options = ClientOptions(api_endpoint=api_endpoint) + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + client_options=client_options, + ) + self.assertEqual(client._connection.API_BASE_URL, api_endpoint) + + def test_ctor_w_client_options_dict(self): + api_endpoint = "https://foo-dns.googleapis.com" + creds = _make_credentials() + http = object() + client_options = {"api_endpoint": api_endpoint} + client = self._make_one( + project=self.PROJECT, + credentials=creds, + _http=http, + client_options=client_options, + ) + self.assertEqual(client._connection.API_BASE_URL, api_endpoint) + def test_quotas_defaults(self): PATH = "projects/%s" % (self.PROJECT,) MANAGED_ZONES = 1234 diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index d44af429f535..000000000000 --- a/docs/Makefile +++ /dev/null @@ -1,153 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." 
- -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/google-cloud.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/google-cloud.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/google-cloud" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/google-cloud" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 
- -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." 
diff --git a/docs/_static/custom.css b/docs/_static/custom.css deleted file mode 100644 index 3d0319dd337c..000000000000 --- a/docs/_static/custom.css +++ /dev/null @@ -1,16 +0,0 @@ -@import url('https://fonts.googleapis.com/css?family=Roboto|Roboto+Mono'); - -@media screen and (min-width: 1080px) { - div.document { - width: 1040px; - } -} - -code.descname { - color: #4885ed; -} - -th.field-name { - min-width: 100px; - color: #3cba54; -} diff --git a/docs/_static/images/favicon.ico b/docs/_static/images/favicon.ico deleted file mode 100644 index 23c553a2966c..000000000000 Binary files a/docs/_static/images/favicon.ico and /dev/null differ diff --git a/docs/_templates/autosummary/class.rst b/docs/_templates/autosummary/class.rst deleted file mode 120000 index bd3c7e22590e..000000000000 --- a/docs/_templates/autosummary/class.rst +++ /dev/null @@ -1 +0,0 @@ -../../../third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/class.rst \ No newline at end of file diff --git a/docs/_templates/autosummary/module.rst b/docs/_templates/autosummary/module.rst deleted file mode 120000 index afd9c7b5e867..000000000000 --- a/docs/_templates/autosummary/module.rst +++ /dev/null @@ -1 +0,0 @@ -../../../third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/module.rst \ No newline at end of file diff --git a/docs/asset b/docs/asset deleted file mode 120000 index 86aa41c40852..000000000000 --- a/docs/asset +++ /dev/null @@ -1 +0,0 @@ -../asset/docs \ No newline at end of file diff --git a/docs/automl b/docs/automl deleted file mode 120000 index 9e2b42485592..000000000000 --- a/docs/automl +++ /dev/null @@ -1 +0,0 @@ -../automl/docs \ No newline at end of file diff --git a/docs/bigquery b/docs/bigquery deleted file mode 120000 index eb7d0e491992..000000000000 --- a/docs/bigquery +++ /dev/null @@ -1 +0,0 @@ -../bigquery/docs/ \ No newline at end of file diff --git a/docs/bigquery_datatransfer b/docs/bigquery_datatransfer deleted file mode 120000 index 
7e49b4933c3f..000000000000 --- a/docs/bigquery_datatransfer +++ /dev/null @@ -1 +0,0 @@ -../bigquery_datatransfer/docs/ \ No newline at end of file diff --git a/docs/bigquery_storage b/docs/bigquery_storage deleted file mode 120000 index 6c07150ad7db..000000000000 --- a/docs/bigquery_storage +++ /dev/null @@ -1 +0,0 @@ -../bigquery_storage/docs/ \ No newline at end of file diff --git a/docs/bigtable b/docs/bigtable deleted file mode 120000 index 27a5bfe5866c..000000000000 --- a/docs/bigtable +++ /dev/null @@ -1 +0,0 @@ -../bigtable/docs/ \ No newline at end of file diff --git a/docs/container b/docs/container deleted file mode 120000 index 348f01928ec5..000000000000 --- a/docs/container +++ /dev/null @@ -1 +0,0 @@ -../container/docs/ \ No newline at end of file diff --git a/docs/containeranalysis b/docs/containeranalysis deleted file mode 120000 index 5c1813ec7a94..000000000000 --- a/docs/containeranalysis +++ /dev/null @@ -1 +0,0 @@ -../containeranalysis/docs \ No newline at end of file diff --git a/docs/core/api_core_changelog.md b/docs/core/api_core_changelog.md deleted file mode 120000 index 0d7caa4a3e6d..000000000000 --- a/docs/core/api_core_changelog.md +++ /dev/null @@ -1 +0,0 @@ -../../api_core/CHANGELOG.md \ No newline at end of file diff --git a/docs/core/auth.rst b/docs/core/auth.rst deleted file mode 120000 index db1985a01d79..000000000000 --- a/docs/core/auth.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/auth.rst \ No newline at end of file diff --git a/docs/core/client.rst b/docs/core/client.rst deleted file mode 120000 index 0259d77ab42d..000000000000 --- a/docs/core/client.rst +++ /dev/null @@ -1 +0,0 @@ -../../core/docs/client.rst \ No newline at end of file diff --git a/docs/core/client_info.rst b/docs/core/client_info.rst deleted file mode 120000 index 011b596537b1..000000000000 --- a/docs/core/client_info.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/client_info.rst \ No newline at end of file diff --git a/docs/core/config.rst 
b/docs/core/config.rst deleted file mode 100644 index 139af98a42c5..000000000000 --- a/docs/core/config.rst +++ /dev/null @@ -1,68 +0,0 @@ -Configuration -************* - -Overview -======== - -Use service client objects to configure your applications. - -For example: - -.. code-block:: python - - >>> from google.cloud import bigquery - >>> client = bigquery.Client() - -When creating a client in this way, the project ID will be determined by -searching these locations in the following order. - -* GOOGLE_CLOUD_PROJECT environment variable -* GOOGLE_APPLICATION_CREDENTIALS JSON file -* Default service configuration path from - ``$ gcloud beta auth application-default login``. -* Google App Engine application ID -* Google Compute Engine project ID (from metadata server) - -You can override the detection of your default project by setting the - ``project`` parameter when creating client objects. - -.. code-block:: python - - >>> from google.cloud import bigquery - >>> client = bigquery.Client(project='my-project') - -You can see what project ID a client is referencing by accessing the ``project`` -property on the client object. - -.. code-block:: python - - >>> client.project - u'my-project' - -Authentication -============== - -The authentication credentials can be implicitly determined from the -environment or directly. See :doc:`/core/auth`. - -Logging in via ``gcloud beta auth application-default login`` will -automatically configure a JSON key file with your default project ID and -credentials. - -Setting the ``GOOGLE_APPLICATION_CREDENTIALS`` and ``GOOGLE_CLOUD_PROJECT`` -environment variables will override the automatically configured credentials. - -You can change your default project ID to ``my-new-default-project`` by -using the ``gcloud`` CLI tool to change the configuration. - -.. code-block:: bash - - $ gcloud config set project my-new-default-project - - -Environment Variables -===================== - -.. 
automodule:: google.cloud.environment_vars - :members: - :show-inheritance: diff --git a/docs/core/core_changelog.md b/docs/core/core_changelog.md deleted file mode 120000 index 3a3a672fc25e..000000000000 --- a/docs/core/core_changelog.md +++ /dev/null @@ -1 +0,0 @@ -../../core/CHANGELOG.md \ No newline at end of file diff --git a/docs/core/exceptions.rst b/docs/core/exceptions.rst deleted file mode 120000 index 47bcc6694524..000000000000 --- a/docs/core/exceptions.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/exceptions.rst \ No newline at end of file diff --git a/docs/core/helpers.rst b/docs/core/helpers.rst deleted file mode 120000 index 30f4943f9ff1..000000000000 --- a/docs/core/helpers.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/helpers.rst \ No newline at end of file diff --git a/docs/core/iam.rst b/docs/core/iam.rst deleted file mode 120000 index 349903e0f5a1..000000000000 --- a/docs/core/iam.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/iam.rst \ No newline at end of file diff --git a/docs/core/index.rst b/docs/core/index.rst deleted file mode 100644 index 45c68ad08ee2..000000000000 --- a/docs/core/index.rst +++ /dev/null @@ -1,31 +0,0 @@ -Core -==== - -.. toctree:: - config - auth - client - client_info - exceptions - helpers - iam - operation - operations_client - page_iterator - path_template - retry - timeout - -Changelog -~~~~~~~~~ - -The ``google-cloud-core`` package contains helpers common to all -``google-cloud-*`` packages. In an attempt to reach a stable API, -much of the functionality has been split out into a new package -``google-api-core``. - -.. 
toctree:: - :maxdepth: 2 - - ``google-api-core`` Changelog - ``google-cloud-core`` Changelog diff --git a/docs/core/operation.rst b/docs/core/operation.rst deleted file mode 120000 index 9a038fb6b79f..000000000000 --- a/docs/core/operation.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/operation.rst \ No newline at end of file diff --git a/docs/core/operations_client.rst b/docs/core/operations_client.rst deleted file mode 120000 index 17ab60406ba7..000000000000 --- a/docs/core/operations_client.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/operations_client.rst \ No newline at end of file diff --git a/docs/core/page_iterator.rst b/docs/core/page_iterator.rst deleted file mode 120000 index c815056363ab..000000000000 --- a/docs/core/page_iterator.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/page_iterator.rst \ No newline at end of file diff --git a/docs/core/path_template.rst b/docs/core/path_template.rst deleted file mode 120000 index 52a1bc7574b8..000000000000 --- a/docs/core/path_template.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/path_template.rst \ No newline at end of file diff --git a/docs/core/retry.rst b/docs/core/retry.rst deleted file mode 120000 index 0800fecb5177..000000000000 --- a/docs/core/retry.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/retry.rst \ No newline at end of file diff --git a/docs/core/timeout.rst b/docs/core/timeout.rst deleted file mode 120000 index 1ec11737305c..000000000000 --- a/docs/core/timeout.rst +++ /dev/null @@ -1 +0,0 @@ -../../api_core/docs/timeout.rst \ No newline at end of file diff --git a/docs/datacatalog b/docs/datacatalog deleted file mode 120000 index 41b79354a771..000000000000 --- a/docs/datacatalog +++ /dev/null @@ -1 +0,0 @@ -../datacatalog/docs \ No newline at end of file diff --git a/docs/datalabeling b/docs/datalabeling deleted file mode 120000 index 87c131ea5578..000000000000 --- a/docs/datalabeling +++ /dev/null @@ -1 +0,0 @@ -../datalabeling/docs \ No newline at end of 
file diff --git a/docs/dataproc b/docs/dataproc deleted file mode 120000 index 5547c1a0d489..000000000000 --- a/docs/dataproc +++ /dev/null @@ -1 +0,0 @@ -../dataproc/docs/ \ No newline at end of file diff --git a/docs/datastore b/docs/datastore deleted file mode 120000 index 8ce744facb5d..000000000000 --- a/docs/datastore +++ /dev/null @@ -1 +0,0 @@ -../datastore/docs/ \ No newline at end of file diff --git a/docs/dlp b/docs/dlp deleted file mode 120000 index b512b460a89c..000000000000 --- a/docs/dlp +++ /dev/null @@ -1 +0,0 @@ -../dlp/docs/ \ No newline at end of file diff --git a/docs/dns b/docs/dns deleted file mode 120000 index 9eea51dc71fe..000000000000 --- a/docs/dns +++ /dev/null @@ -1 +0,0 @@ -../dns/docs \ No newline at end of file diff --git a/docs/error-reporting b/docs/error-reporting deleted file mode 120000 index bb46a9501069..000000000000 --- a/docs/error-reporting +++ /dev/null @@ -1 +0,0 @@ -../error_reporting/docs/ \ No newline at end of file diff --git a/docs/firestore b/docs/firestore deleted file mode 120000 index 3c1d977bafcf..000000000000 --- a/docs/firestore +++ /dev/null @@ -1 +0,0 @@ -../firestore/docs/ \ No newline at end of file diff --git a/docs/grafeas b/docs/grafeas deleted file mode 120000 index 5e8270b35fb2..000000000000 --- a/docs/grafeas +++ /dev/null @@ -1 +0,0 @@ -../grafeas/docs \ No newline at end of file diff --git a/docs/iam b/docs/iam deleted file mode 120000 index 783f393b393c..000000000000 --- a/docs/iam +++ /dev/null @@ -1 +0,0 @@ -../iam/docs \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 09d44e06ccda..000000000000 --- a/docs/index.rst +++ /dev/null @@ -1,123 +0,0 @@ -.. 
toctree:: - :maxdepth: 1 - :hidden: - - Core Libraries - Asset Management - AutoML - BigQuery - BigQuery Data-Transfer - BigQuery Storage - Bigtable - Container - Container Analysis - Data Catalog - Data Labeling - Data Loss Prevention - Dataproc - Datastore - DNS - Firestore - Grafeas - IAM - IoT - Key Management - Natural Language - OSLogin - PubSub - Memorystore - Resource Manager - Runtime Configuration - Scheduler - Security Center - Security Scanner - Spanner - Speech - Stackdriver Error Reporting - Stackdriver Incident Response & Management - Stackdriver Logging - Stackdriver Monitoring - Stackdriver Trace - Storage - Talent - Tasks - Text-to-Speech - Translate - Video Intelligence - Vision - Web Risk - Release History - -Google Cloud Client Library for Python -====================================== - -Getting started ---------------- - -For more information on setting up your Python development environment, -such as installing ``pip`` and ``virtualenv`` on your system, please refer -to `Python Development Environment Setup Guide`_ for Google Cloud Platform. - -.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup - -Cloud Datastore -~~~~~~~~~~~~~~~ - -`Google Cloud Datastore`_ is a fully managed, schemaless database for storing -non-relational data. - -.. _Google Cloud Datastore: https://cloud.google.com/datastore/ - -Install the ``google-cloud-datastore`` library using ``pip``: - -.. code-block:: console - - $ pip install google-cloud-datastore - -Example -^^^^^^^ - -.. code-block:: python - - from google.cloud import datastore - - client = datastore.Client() - key = client.key('Person') - - entity = datastore.Entity(key=key) - entity['name'] = 'Your name' - entity['age'] = 25 - client.put(entity) - -Cloud Storage -~~~~~~~~~~~~~ - -`Google Cloud Storage`_ allows you to store data on Google infrastructure. - -.. 
_Google Cloud Storage: https://cloud.google.com/storage/ - -Install the ``google-cloud-storage`` library using ``pip``: - -.. code-block:: console - - $ pip install google-cloud-storage - -Example -^^^^^^^ - -.. code-block:: python - - from google.cloud import storage - - client = storage.Client() - bucket = client.get_bucket('') - blob = bucket.blob('my-test-file.txt') - blob.upload_from_string('this is test content!') - -Resources -~~~~~~~~~ - -* `GitHub `__ -* `Issues `__ -* `Stack Overflow `__ -* `PyPI `__ diff --git a/docs/iot b/docs/iot deleted file mode 120000 index e7b6a0ab0473..000000000000 --- a/docs/iot +++ /dev/null @@ -1 +0,0 @@ -../iot/docs \ No newline at end of file diff --git a/docs/irm b/docs/irm deleted file mode 120000 index 02724766d326..000000000000 --- a/docs/irm +++ /dev/null @@ -1 +0,0 @@ -../irm/docs \ No newline at end of file diff --git a/docs/kms b/docs/kms deleted file mode 120000 index 503e417ec91c..000000000000 --- a/docs/kms +++ /dev/null @@ -1 +0,0 @@ -../kms/docs/ \ No newline at end of file diff --git a/docs/language b/docs/language deleted file mode 120000 index ab6f1cc1e084..000000000000 --- a/docs/language +++ /dev/null @@ -1 +0,0 @@ -../language/docs/ \ No newline at end of file diff --git a/docs/logging b/docs/logging deleted file mode 120000 index 072fd0d1a72f..000000000000 --- a/docs/logging +++ /dev/null @@ -1 +0,0 @@ -../logging/docs/ \ No newline at end of file diff --git a/docs/monitoring b/docs/monitoring deleted file mode 120000 index d7a58517323e..000000000000 --- a/docs/monitoring +++ /dev/null @@ -1 +0,0 @@ -../monitoring/docs \ No newline at end of file diff --git a/docs/oslogin b/docs/oslogin deleted file mode 120000 index d26f32b7b0da..000000000000 --- a/docs/oslogin +++ /dev/null @@ -1 +0,0 @@ -../oslogin/docs/ \ No newline at end of file diff --git a/docs/pubsub b/docs/pubsub deleted file mode 120000 index 75a8b87c5ae0..000000000000 --- a/docs/pubsub +++ /dev/null @@ -1 +0,0 @@ -../pubsub/docs/ \ No newline 
at end of file diff --git a/docs/redis b/docs/redis deleted file mode 120000 index 351c953543ba..000000000000 --- a/docs/redis +++ /dev/null @@ -1 +0,0 @@ -../redis/docs \ No newline at end of file diff --git a/docs/releases.rst b/docs/releases.rst deleted file mode 100644 index cdabdf539783..000000000000 --- a/docs/releases.rst +++ /dev/null @@ -1,60 +0,0 @@ -######################### -``google-cloud`` Releases -######################### - -.. attention:: The ``google-cloud`` package is deprecated - - The ``google-cloud`` package is no longer maintained or updated. Instead, - install the ``google-cloud-*`` subpackages directly. - - -The ``google-cloud`` package (formerly ``gcloud``) contains -**all** ``google-cloud-*`` subpackages. - -* ``gcloud==0.01`` (`PyPI `__) -* ``gcloud==0.02`` (`PyPI `__) -* ``gcloud==0.02.1`` (`PyPI `__) -* ``gcloud==0.02.2`` (`PyPI `__) -* ``gcloud==0.3.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.4.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.4.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.4.2`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.4.3`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.5.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.6.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.7.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.7.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.8.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.9.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.10.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.10.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.11.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.12.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.12.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.13.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.14.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.15.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.16.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.16.1`` (`PyPI `__, `Release Notes `__) 
-* ``gcloud==0.17.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.18.0`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.18.1`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.18.2`` (`PyPI `__, `Release Notes `__) -* ``gcloud==0.18.3`` (`PyPI `__, `Release Notes `__) -* ``0.19.0`` (`PyPI `__, `Release Notes `__) -* ``0.20.0`` (`PyPI `__, `Release Notes `__) -* ``0.21.0`` (`PyPI `__, `Release Notes `__) -* ``0.21.1`` (`PyPI `__, `Release Notes `__) -* ``0.22.0`` (`PyPI `__, `Release Notes `__) -* ``0.23.0`` (`PyPI `__, `Release Notes `__) -* ``0.24.0`` (`PyPI `__, `Release Notes `__) -* ``0.25.0`` (`PyPI `__, `Release Notes `__) -* ``0.25.1`` (`PyPI `__, `Release Notes `__) -* ``0.26.0`` (`PyPI `__, `Release Notes `__) -* ``0.26.1`` (`PyPI `__, `Release Notes `__) -* ``0.27.0`` (`PyPI `__, `Release Notes `__) -* ``0.28.0`` (`PyPI `__, `Release Notes `__) -* ``0.29.0`` (`PyPI `__, `Release Notes `__) -* ``0.30.0`` (`PyPI `__, `Release Notes `__) -* ``0.31.0`` (`PyPI `__, `Release Notes `__) -* ``0.32.0`` (`PyPI `__, `Release Notes `__) diff --git a/docs/requirements.txt b/docs/requirements.txt deleted file mode 100644 index c5e6e1519dce..000000000000 --- a/docs/requirements.txt +++ /dev/null @@ -1,51 +0,0 @@ -setuptools >= 36.4.0 -sphinx>=1.6.3, <2.2 -ipython >= 4 -recommonmark >= 0.4.0 -grpcio-gcp >= 0.2.2 - --e api_core/ --e core/ --e storage/ --e asset/ --e automl/ --e bigquery/ --e bigquery_datatransfer/ --e bigquery_storage/ --e bigtable/ --e container/ --e containeranalysis/ --e datacatalog/ --e datalabeling/ --e dataproc/ --e datastore/ --e dlp/ --e dns/ --e firestore/ --e grafeas/ --e iam/ --e iot/ --e irm/ --e kms/ --e language/ --e logging/ --e error_reporting/ --e monitoring/ --e pubsub/ --e oslogin/ --e redis/ --e resource_manager/ --e runtimeconfig/ --e scheduler/ --e securitycenter/ --e spanner/ --e speech/ --e talent/ --e tasks/ --e texttospeech/ --e trace/ --e translate/ --e videointelligence/ --e vision/ --e webrisk/ --e websecurityscanner/ diff 
--git a/docs/resource-manager b/docs/resource-manager deleted file mode 120000 index 2bd0e30aaf64..000000000000 --- a/docs/resource-manager +++ /dev/null @@ -1 +0,0 @@ -../resource_manager/docs/ \ No newline at end of file diff --git a/docs/runtimeconfig b/docs/runtimeconfig deleted file mode 120000 index 4a0a8ad2ddea..000000000000 --- a/docs/runtimeconfig +++ /dev/null @@ -1 +0,0 @@ -../runtimeconfig/docs \ No newline at end of file diff --git a/docs/scheduler b/docs/scheduler deleted file mode 120000 index 767cd9a38af9..000000000000 --- a/docs/scheduler +++ /dev/null @@ -1 +0,0 @@ -../scheduler/docs \ No newline at end of file diff --git a/docs/securitycenter b/docs/securitycenter deleted file mode 120000 index c17cf5ceaec8..000000000000 --- a/docs/securitycenter +++ /dev/null @@ -1 +0,0 @@ -../securitycenter/docs \ No newline at end of file diff --git a/docs/spanner b/docs/spanner deleted file mode 120000 index e934485326b2..000000000000 --- a/docs/spanner +++ /dev/null @@ -1 +0,0 @@ -../spanner/docs/ \ No newline at end of file diff --git a/docs/speech b/docs/speech deleted file mode 120000 index 828be4c44930..000000000000 --- a/docs/speech +++ /dev/null @@ -1 +0,0 @@ -../speech/docs/ \ No newline at end of file diff --git a/docs/storage b/docs/storage deleted file mode 120000 index de74c3c4f7cf..000000000000 --- a/docs/storage +++ /dev/null @@ -1 +0,0 @@ -../storage/docs/ \ No newline at end of file diff --git a/docs/talent b/docs/talent deleted file mode 120000 index 90b2d20990c2..000000000000 --- a/docs/talent +++ /dev/null @@ -1 +0,0 @@ -../talent/docs \ No newline at end of file diff --git a/docs/tasks b/docs/tasks deleted file mode 120000 index 5974558b192a..000000000000 --- a/docs/tasks +++ /dev/null @@ -1 +0,0 @@ -../tasks/docs \ No newline at end of file diff --git a/docs/texttospeech b/docs/texttospeech deleted file mode 120000 index fcb796b9487d..000000000000 --- a/docs/texttospeech +++ /dev/null @@ -1 +0,0 @@ -../texttospeech/docs/ \ No newline at 
end of file diff --git a/docs/trace b/docs/trace deleted file mode 120000 index a2361d0f9a55..000000000000 --- a/docs/trace +++ /dev/null @@ -1 +0,0 @@ -../trace/docs \ No newline at end of file diff --git a/docs/translate b/docs/translate deleted file mode 120000 index 8ce8fe7bf496..000000000000 --- a/docs/translate +++ /dev/null @@ -1 +0,0 @@ -../translate/docs \ No newline at end of file diff --git a/docs/videointelligence b/docs/videointelligence deleted file mode 120000 index 7f22ebf79f2b..000000000000 --- a/docs/videointelligence +++ /dev/null @@ -1 +0,0 @@ -../videointelligence/docs/ \ No newline at end of file diff --git a/docs/vision b/docs/vision deleted file mode 120000 index bd63572ac784..000000000000 --- a/docs/vision +++ /dev/null @@ -1 +0,0 @@ -../vision/docs \ No newline at end of file diff --git a/docs/webrisk b/docs/webrisk deleted file mode 120000 index 6b09785486bf..000000000000 --- a/docs/webrisk +++ /dev/null @@ -1 +0,0 @@ -../webrisk/docs \ No newline at end of file diff --git a/docs/websecurityscanner b/docs/websecurityscanner deleted file mode 120000 index a0bc42093b44..000000000000 --- a/docs/websecurityscanner +++ /dev/null @@ -1 +0,0 @@ -../websecurityscanner/docs/ \ No newline at end of file diff --git a/error_reporting/CHANGELOG.md b/error_reporting/CHANGELOG.md index 81c20812af9f..aa3924d1f596 100644 --- a/error_reporting/CHANGELOG.md +++ b/error_reporting/CHANGELOG.md @@ -4,6 +4,24 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## 0.33.0 + +10-22-2019 12:10 PDT + +### New Features +- Add `client_options` to constructor ([#9152](https://github.com/googleapis/google-cloud-python/pull/9152)) + +### Dependencies +- Pin `google-cloud-logging >= 1.14.0, < 2.0.0dev`. ([#9476](https://github.com/googleapis/google-cloud-python/pull/9476)) + +### Documentation +- Remove references to the old authentication credentials. 
([#9456](https://github.com/googleapis/google-cloud-python/pull/9456)) +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for `gh-pages`, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) + +### Internal / Testing Changes +- Harden `test_report_exception` systest by increasing `max_tries`. ([#9396](https://github.com/googleapis/google-cloud-python/pull/9396)) + ## 0.32.1 08-23-2019 10:12 PDT diff --git a/error_reporting/README.rst b/error_reporting/README.rst index 39561dd92cb3..bce80fd49e14 100644 --- a/error_reporting/README.rst +++ b/error_reporting/README.rst @@ -85,7 +85,7 @@ Windows Next Steps ~~~~~~~~~~ -- Read the `Client Library Documentation`_ for Google Cloud Datastore API +- Read the `Client Library Documentation`_ for Google Cloud Datastore API to see other available methods on the client. - Read the `Product documentation`_ to learn more about the product and see How-to Guides. diff --git a/error_reporting/google/cloud/error_reporting/_gapic.py b/error_reporting/google/cloud/error_reporting/_gapic.py index b1925ff6a491..0c6ec9e60a1a 100644 --- a/error_reporting/google/cloud/error_reporting/_gapic.py +++ b/error_reporting/google/cloud/error_reporting/_gapic.py @@ -29,7 +29,9 @@ def make_report_error_api(client): :returns: An Error Reporting API instance. 
""" gapic_api = report_errors_service_client.ReportErrorsServiceClient( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) return _ErrorReportingGapicApi(gapic_api, client.project) diff --git a/error_reporting/google/cloud/error_reporting/_logging.py b/error_reporting/google/cloud/error_reporting/_logging.py index 5eaa4693c96f..5832cc7e2d24 100644 --- a/error_reporting/google/cloud/error_reporting/_logging.py +++ b/error_reporting/google/cloud/error_reporting/_logging.py @@ -30,12 +30,12 @@ class _ErrorReportingLoggingAPI(object): passed falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no - ``_http`` object is passed), falls back to the default - inferred from the environment. + :param credentials: The authorization credentials to attach to requests. + These credentials identify this application to the service. + If none are specified, the client will attempt to ascertain + the credentials from the environment. :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object @@ -54,11 +54,27 @@ class _ErrorReportingLoggingAPI(object): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. 
""" - def __init__(self, project, credentials=None, _http=None, client_info=None): + def __init__( + self, + project, + credentials=None, + _http=None, + client_info=None, + client_options=None, + ): self.logging_client = google.cloud.logging.client.Client( - project, credentials, _http=_http, client_info=client_info + project, + credentials, + _http=_http, + client_info=client_info, + client_options=client_options, ) def report_error_event(self, error_report): diff --git a/error_reporting/google/cloud/error_reporting/client.py b/error_reporting/google/cloud/error_reporting/client.py index 4fdae4c69e25..c4cb816ead75 100644 --- a/error_reporting/google/cloud/error_reporting/client.py +++ b/error_reporting/google/cloud/error_reporting/client.py @@ -96,12 +96,12 @@ class Client(ClientWithProject): passed falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :type credentials: :class:`google.auth.credentials.Credentials` or :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no - ``_http`` object is passed), falls back to the default - inferred from the environment. + :param credentials: The authorization credentials to attach to requests. + These credentials identify this application to the service. + If none are specified, the client will attempt to ascertain + the credentials from the environment. :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object @@ -143,6 +143,11 @@ class Client(ClientWithProject): you only need to set this if you're developing your own library or partner tool. + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. 
+ :raises: :class:`ValueError` if the project is neither passed in nor set in the environment. """ @@ -158,6 +163,7 @@ def __init__( service=None, version=None, client_info=_CLIENT_INFO, + client_options=None, _use_grpc=None, ): super(Client, self).__init__( @@ -168,6 +174,7 @@ def __init__( self.service = service if service else self.DEFAULT_SERVICE self.version = version self._client_info = client_info + self._client_options = client_options if _use_grpc is None: self._use_grpc = _USE_GRPC @@ -195,7 +202,11 @@ def report_errors_api(self): self._report_errors_api = make_report_error_api(self) else: self._report_errors_api = _ErrorReportingLoggingAPI( - self.project, self._credentials, self._http, self._client_info + self.project, + self._credentials, + self._http, + self._client_info, + self._client_options, ) return self._report_errors_api diff --git a/error_reporting/setup.py b/error_reporting/setup.py index da91c2707903..80a0350ad79a 100644 --- a/error_reporting/setup.py +++ b/error_reporting/setup.py @@ -22,13 +22,13 @@ name = "google-cloud-error-reporting" description = "Stackdriver Error Reporting API client library" -version = "0.32.1" +version = "0.33.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 3 - Alpha" -dependencies = ["google-cloud-logging>=1.11.0, <2.0dev"] +dependencies = ["google-cloud-logging>=1.14.0, <2.0dev"] extras = {} diff --git a/error_reporting/tests/system/test_system.py b/error_reporting/tests/system/test_system.py index b72223e2ae0a..cf454aecdbd1 100644 --- a/error_reporting/tests/system/test_system.py +++ b/error_reporting/tests/system/test_system.py @@ -117,7 +117,7 @@ def test_report_exception(self): is_one = functools.partial(operator.eq, 1) is_one.__name__ = "is_one" # partial() has no name. 
- retry = RetryResult(is_one, max_tries=6) + retry = RetryResult(is_one, max_tries=8) wrapped_get_count = retry(_get_error_count) error_count = wrapped_get_count(class_name, Config.CLIENT) diff --git a/error_reporting/tests/unit/test__gapic.py b/error_reporting/tests/unit/test__gapic.py index 836f46b82495..00940f466df0 100644 --- a/error_reporting/tests/unit/test__gapic.py +++ b/error_reporting/tests/unit/test__gapic.py @@ -25,7 +25,9 @@ def _call_fut(client): return make_report_error_api(client) def test_make_report_error_api(self): - client = mock.Mock(spec=["project", "_credentials", "_client_info"]) + client = mock.Mock( + spec=["project", "_credentials", "_client_info", "_client_options"] + ) # Call the function being tested. patch = mock.patch( @@ -41,7 +43,9 @@ def test_make_report_error_api(self): self.assertIs(report_error_client._project, client.project) self.assertIs(report_error_client._gapic_api, patched.return_value) patched.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) diff --git a/error_reporting/tests/unit/test__logging.py b/error_reporting/tests/unit/test__logging.py index e2b0638b986e..726eaabfec75 100644 --- a/error_reporting/tests/unit/test__logging.py +++ b/error_reporting/tests/unit/test__logging.py @@ -40,7 +40,7 @@ def test_ctor_defaults(self, mocked_cls): self.assertIs(logging_api.logging_client, mocked_cls.return_value) mocked_cls.assert_called_once_with( - self.PROJECT, credentials, _http=None, client_info=None + self.PROJECT, credentials, _http=None, client_info=None, client_options=None ) @mock.patch("google.cloud.logging.client.Client") @@ -48,14 +48,23 @@ def test_ctor_explicit(self, mocked_cls): credentials = _make_credentials() http = mock.Mock() client_info = mock.Mock() + client_options = mock.Mock() logging_api = self._make_one( - self.PROJECT, credentials, _http=http, 
client_info=client_info + self.PROJECT, + credentials, + _http=http, + client_info=client_info, + client_options=client_options, ) self.assertIs(logging_api.logging_client, mocked_cls.return_value) mocked_cls.assert_called_once_with( - self.PROJECT, credentials, _http=http, client_info=client_info + self.PROJECT, + credentials, + _http=http, + client_info=client_info, + client_options=client_options, ) @mock.patch("google.cloud.logging.client.Client") diff --git a/error_reporting/tests/unit/test_client.py b/error_reporting/tests/unit/test_client.py index 5e4dc925a65f..3a7290e8aa96 100644 --- a/error_reporting/tests/unit/test_client.py +++ b/error_reporting/tests/unit/test_client.py @@ -67,16 +67,19 @@ def test_ctor_defaults(self, default_mock): def test_ctor_explicit(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( project=self.PROJECT, credentials=credentials, service=self.SERVICE, version=self.VERSION, client_info=client_info, + client_options=client_options, ) self.assertEqual(client.service, self.SERVICE) self.assertEqual(client.version, self.VERSION) self.assertIs(client._client_info, client_info) + self.assertIs(client._client_options, client_options) def test_report_errors_api_already(self): credentials = _make_credentials() @@ -87,11 +90,13 @@ def test_report_errors_api_already(self): def test_report_errors_api_wo_grpc(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() http = mock.Mock() client = self._make_one( project=self.PROJECT, credentials=credentials, client_info=client_info, + client_options=client_options, _http=http, _use_grpc=False, ) @@ -103,7 +108,9 @@ def test_report_errors_api_wo_grpc(self): api = client.report_errors_api self.assertIs(api, patched.return_value) - patched.assert_called_once_with(self.PROJECT, credentials, http, client_info) + patched.assert_called_once_with( + self.PROJECT, credentials, http, client_info, 
client_options + ) def test_report_errors_api_w_grpc(self): credentials = _make_credentials() diff --git a/firestore/CHANGELOG.md b/firestore/CHANGELOG.md index 918252cd8628..efdeb52be0b6 100644 --- a/firestore/CHANGELOG.md +++ b/firestore/CHANGELOG.md @@ -5,6 +5,32 @@ [1]: https://pypi.org/project/google-cloud-firestore/#history +## 1.5.0 + +10-15-2019 06:45 PDT + + +### Implementation Changes +- Expand dotted keys in mappings used as cursors. ([#8568](https://github.com/googleapis/google-cloud-python/pull/8568)) +- Tweak GAPIC client configuration (via synth). ([#9173](https://github.com/googleapis/google-cloud-python/pull/9173)) + +### New Features +- Add `IN`, `ARRAY_CONTAINS_ANY` operators; update docstrings (via synth). ([#9439](https://github.com/googleapis/google-cloud-python/pull/9439)) +- Add `COLLECTION_GROUP` to `Index.QueryScope` enum; update docstrings (via synth). ([#9253](https://github.com/googleapis/google-cloud-python/pull/9253)) +- Add `client_options` to v1 client. ([#9048](https://github.com/googleapis/google-cloud-python/pull/9048)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Update README example to use non-deprecated `query.get`. ([#9235](https://github.com/googleapis/google-cloud-python/pull/9235)) +- Remove duplicated word in README. ([#9297](https://github.com/googleapis/google-cloud-python/pull/9297)) +- Fix intersphinx reference to `requests`. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core refs`. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Add license file. ([#9109](https://github.com/googleapis/google-cloud-python/pull/9109)) +- Fix reference to library name ([#9047](https://github.com/googleapis/google-cloud-python/pull/9047)) +- Remove compatability badges from READMEs. 
([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + ## 1.4.0 08-06-2019 11:43 PDT diff --git a/firestore/google/cloud/firestore_admin_v1/proto/operation.proto b/firestore/google/cloud/firestore_admin_v1/proto/operation.proto index 08194fe09341..6494ab7cba99 100644 --- a/firestore/google/cloud/firestore_admin_v1/proto/operation.proto +++ b/firestore/google/cloud/firestore_admin_v1/proto/operation.proto @@ -162,17 +162,6 @@ message ExportDocumentsResponse { string output_uri_prefix = 1; } -// Describes the progress of the operation. -// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1.Progress] -// is used. -message Progress { - // The amount of work estimated. - int64 estimated_work = 1; - - // The amount of work completed. - int64 completed_work = 2; -} - // Describes the state of the operation. enum OperationState { // Unspecified. @@ -201,3 +190,14 @@ enum OperationState { // google.longrunning.Operations.CancelOperation. CANCELLED = 7; } + +// Describes the progress of the operation. +// Unit of work is generic and must be interpreted based on where [Progress][google.firestore.admin.v1.Progress] +// is used. +message Progress { + // The amount of work estimated. + int64 estimated_work = 1; + + // The amount of work completed. + int64 completed_work = 2; +} diff --git a/firestore/google/cloud/firestore_v1/gapic/enums.py b/firestore/google/cloud/firestore_v1/gapic/enums.py index 1220f0d917ed..857e350e454d 100644 --- a/firestore/google/cloud/firestore_v1/gapic/enums.py +++ b/firestore/google/cloud/firestore_v1/gapic/enums.py @@ -91,6 +91,10 @@ class Operator(enum.IntEnum): ``order_by``. EQUAL (int): Equal. ARRAY_CONTAINS (int): Contains. Requires that the field is an array. + IN (int): In. Requires that ``value`` is a non-empty ArrayValue with at most 10 + values. + ARRAY_CONTAINS_ANY (int): Contains any. 
Requires that the field is an array and ``value`` is a + non-empty ArrayValue with at most 10 values. """ OPERATOR_UNSPECIFIED = 0 @@ -100,6 +104,8 @@ class Operator(enum.IntEnum): GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 ARRAY_CONTAINS = 7 + IN = 8 + ARRAY_CONTAINS_ANY = 9 class UnaryFilter(object): class Operator(enum.IntEnum): diff --git a/firestore/google/cloud/firestore_v1/proto/query.proto b/firestore/google/cloud/firestore_v1/proto/query.proto index e2d7b836ff2d..a8d5e7a2ebb1 100644 --- a/firestore/google/cloud/firestore_v1/proto/query.proto +++ b/firestore/google/cloud/firestore_v1/proto/query.proto @@ -102,6 +102,14 @@ message StructuredQuery { // Contains. Requires that the field is an array. ARRAY_CONTAINS = 7; + + // In. Requires that `value` is a non-empty ArrayValue with at most 10 + // values. + IN = 8; + + // Contains any. Requires that the field is an array and + // `value` is a non-empty ArrayValue with at most 10 values. + ARRAY_CONTAINS_ANY = 9; } // The field to filter by. @@ -114,18 +122,6 @@ message StructuredQuery { Value value = 3; } - // A sort direction. - enum Direction { - // Unspecified. - DIRECTION_UNSPECIFIED = 0; - - // Ascending. - ASCENDING = 1; - - // Descending. - DESCENDING = 2; - } - // A filter with a single operand. message UnaryFilter { // A unary operator. @@ -150,6 +146,15 @@ message StructuredQuery { } } + // The projection of document's fields to return. + message Projection { + // The fields to return. + // + // If empty, all fields are returned. To only return the name + // of the document, use `['__name__']`. + repeated FieldReference fields = 2; + } + // An order on a field. message Order { // The field to order by. @@ -164,13 +169,16 @@ message StructuredQuery { string field_path = 2; } - // The projection of document's fields to return. - message Projection { - // The fields to return. - // - // If empty, all fields are returned. To only return the name - // of the document, use `['__name__']`. 
- repeated FieldReference fields = 2; + // A sort direction. + enum Direction { + // Unspecified. + DIRECTION_UNSPECIFIED = 0; + + // Ascending. + ASCENDING = 1; + + // Descending. + DESCENDING = 2; } // The projection to return. diff --git a/firestore/google/cloud/firestore_v1/proto/query_pb2.py b/firestore/google/cloud/firestore_v1/proto/query_pb2.py index 089b33a34419..057de927633c 100644 --- a/firestore/google/cloud/firestore_v1/proto/query_pb2.py +++ b/firestore/google/cloud/firestore_v1/proto/query_pb2.py @@ -30,7 +30,7 @@ "\n\027com.google.firestore.v1B\nQueryProtoP\001Z=": _operator_enum.GREATER_THAN_OR_EQUAL, ">": _operator_enum.GREATER_THAN, "array_contains": _operator_enum.ARRAY_CONTAINS, + "in": _operator_enum.IN, + "array_contains_any": _operator_enum.ARRAY_CONTAINS_ANY, } _BAD_OP_STRING = "Operator string {!r} is invalid. Valid choices are: {}." _BAD_OP_NAN_NULL = 'Only an equality filter ("==") can be used with None or NaN values' diff --git a/firestore/google/cloud/firestore_v1beta1/gapic/enums.py b/firestore/google/cloud/firestore_v1beta1/gapic/enums.py index 1220f0d917ed..857e350e454d 100644 --- a/firestore/google/cloud/firestore_v1beta1/gapic/enums.py +++ b/firestore/google/cloud/firestore_v1beta1/gapic/enums.py @@ -91,6 +91,10 @@ class Operator(enum.IntEnum): ``order_by``. EQUAL (int): Equal. ARRAY_CONTAINS (int): Contains. Requires that the field is an array. + IN (int): In. Requires that ``value`` is a non-empty ArrayValue with at most 10 + values. + ARRAY_CONTAINS_ANY (int): Contains any. Requires that the field is an array and ``value`` is a + non-empty ArrayValue with at most 10 values. 
""" OPERATOR_UNSPECIFIED = 0 @@ -100,6 +104,8 @@ class Operator(enum.IntEnum): GREATER_THAN_OR_EQUAL = 4 EQUAL = 5 ARRAY_CONTAINS = 7 + IN = 8 + ARRAY_CONTAINS_ANY = 9 class UnaryFilter(object): class Operator(enum.IntEnum): diff --git a/firestore/google/cloud/firestore_v1beta1/proto/query.proto b/firestore/google/cloud/firestore_v1beta1/proto/query.proto index fb9e4e558004..a8068ae6c2f3 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/query.proto +++ b/firestore/google/cloud/firestore_v1beta1/proto/query.proto @@ -102,6 +102,14 @@ message StructuredQuery { // Contains. Requires that the field is an array. ARRAY_CONTAINS = 7; + + // In. Requires that `value` is a non-empty ArrayValue with at most 10 + // values. + IN = 8; + + // Contains any. Requires that the field is an array and + // `value` is a non-empty ArrayValue with at most 10 values. + ARRAY_CONTAINS_ANY = 9; } // The field to filter by. @@ -147,6 +155,15 @@ message StructuredQuery { Direction direction = 2; } + // The projection of document's fields to return. + message Projection { + // The fields to return. + // + // If empty, all fields are returned. To only return the name + // of the document, use `['__name__']`. + repeated FieldReference fields = 2; + } + // A reference to a field, such as `max(messages.time) as max_time`. message FieldReference { string field_path = 2; @@ -164,15 +181,6 @@ message StructuredQuery { DESCENDING = 2; } - // The projection of document's fields to return. - message Projection { - // The fields to return. - // - // If empty, all fields are returned. To only return the name - // of the document, use `['__name__']`. - repeated FieldReference fields = 2; - } - // The projection to return. 
Projection select = 1; diff --git a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py index 4e0b57845a09..70c26f514e23 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/query_pb2.py @@ -30,7 +30,7 @@ "\n\034com.google.firestore.v1beta1B\nQueryProtoP\001ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\242\002\004GCFS\252\002\036Google.Cloud.Firestore.V1Beta1\312\002\036Google\\Cloud\\Firestore\\V1beta1" ), serialized_pb=_b( - '\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\xb9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 
\x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 \x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\xec\x02\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\x97\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 
\x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 \x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' + '\n0google/cloud/firestore_v1beta1/proto/query.proto\x12\x18google.firestore.v1beta1\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x1cgoogle/api/annotations.proto"\xd9\x0f\n\x0fStructuredQuery\x12\x44\n\x06select\x18\x01 \x01(\x0b\x32\x34.google.firestore.v1beta1.StructuredQuery.Projection\x12J\n\x04\x66rom\x18\x02 \x03(\x0b\x32<.google.firestore.v1beta1.StructuredQuery.CollectionSelector\x12?\n\x05where\x18\x03 \x01(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter\x12\x41\n\x08order_by\x18\x04 \x03(\x0b\x32/.google.firestore.v1beta1.StructuredQuery.Order\x12\x32\n\x08start_at\x18\x07 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x30\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32 .google.firestore.v1beta1.Cursor\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12*\n\x05limit\x18\x05 \x01(\x0b\x32\x1b.google.protobuf.Int32Value\x1a\x44\n\x12\x43ollectionSelector\x12\x15\n\rcollection_id\x18\x02 \x01(\t\x12\x17\n\x0f\x61ll_descendants\x18\x03 \x01(\x08\x1a\x8c\x02\n\x06\x46ilter\x12U\n\x10\x63omposite_filter\x18\x01 \x01(\x0b\x32\x39.google.firestore.v1beta1.StructuredQuery.CompositeFilterH\x00\x12M\n\x0c\x66ield_filter\x18\x02 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.FieldFilterH\x00\x12M\n\x0cunary_filter\x18\x03 \x01(\x0b\x32\x35.google.firestore.v1beta1.StructuredQuery.UnaryFilterH\x00\x42\r\n\x0b\x66ilter_type\x1a\xd3\x01\n\x0f\x43ompositeFilter\x12N\n\x02op\x18\x01 \x01(\x0e\x32\x42.google.firestore.v1beta1.StructuredQuery.CompositeFilter.Operator\x12\x41\n\x07\x66ilters\x18\x02 
\x03(\x0b\x32\x30.google.firestore.v1beta1.StructuredQuery.Filter"-\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x1a\x8c\x03\n\x0b\x46ieldFilter\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12J\n\x02op\x18\x02 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.FieldFilter.Operator\x12.\n\x05value\x18\x03 \x01(\x0b\x32\x1f.google.firestore.v1beta1.Value"\xb7\x01\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\r\n\tLESS_THAN\x10\x01\x12\x16\n\x12LESS_THAN_OR_EQUAL\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\x19\n\x15GREATER_THAN_OR_EQUAL\x10\x04\x12\t\n\x05\x45QUAL\x10\x05\x12\x12\n\x0e\x41RRAY_CONTAINS\x10\x07\x12\x06\n\x02IN\x10\x08\x12\x16\n\x12\x41RRAY_CONTAINS_ANY\x10\t\x1a\xf3\x01\n\x0bUnaryFilter\x12J\n\x02op\x18\x01 \x01(\x0e\x32>.google.firestore.v1beta1.StructuredQuery.UnaryFilter.Operator\x12I\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReferenceH\x00"=\n\x08Operator\x12\x18\n\x14OPERATOR_UNSPECIFIED\x10\x00\x12\n\n\x06IS_NAN\x10\x02\x12\x0b\n\x07IS_NULL\x10\x03\x42\x0e\n\x0coperand_type\x1a\x98\x01\n\x05Order\x12G\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x12\x46\n\tdirection\x18\x02 \x01(\x0e\x32\x33.google.firestore.v1beta1.StructuredQuery.Direction\x1aV\n\nProjection\x12H\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x38.google.firestore.v1beta1.StructuredQuery.FieldReference\x1a$\n\x0e\x46ieldReference\x12\x12\n\nfield_path\x18\x02 \x01(\t"E\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\r\n\tASCENDING\x10\x01\x12\x0e\n\nDESCENDING\x10\x02"I\n\x06\x43ursor\x12/\n\x06values\x18\x01 \x03(\x0b\x32\x1f.google.firestore.v1beta1.Value\x12\x0e\n\x06\x62\x65\x66ore\x18\x02 
\x01(\x08\x42\xb8\x01\n\x1c\x63om.google.firestore.v1beta1B\nQueryProtoP\x01ZAgoogle.golang.org/genproto/googleapis/firestore/v1beta1;firestore\xa2\x02\x04GCFS\xaa\x02\x1eGoogle.Cloud.Firestore.V1Beta1\xca\x02\x1eGoogle\\Cloud\\Firestore\\V1beta1b\x06proto3' ), dependencies=[ google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR, @@ -103,11 +103,21 @@ _descriptor.EnumValueDescriptor( name="ARRAY_CONTAINS", index=6, number=7, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="IN", index=7, number=8, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ARRAY_CONTAINS_ANY", + index=8, + number=9, + serialized_options=None, + type=None, + ), ], containing_type=None, serialized_options=None, serialized_start=1422, - serialized_end=1573, + serialized_end=1605, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_FIELDFILTER_OPERATOR) @@ -133,8 +143,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1742, - serialized_end=1803, + serialized_start=1774, + serialized_end=1835, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_UNARYFILTER_OPERATOR) @@ -160,8 +170,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2102, - serialized_end=2171, + serialized_start=2134, + serialized_end=2203, ) _sym_db.RegisterEnumDescriptor(_STRUCTUREDQUERY_DIRECTION) @@ -431,7 +441,7 @@ extension_ranges=[], oneofs=[], serialized_start=1209, - serialized_end=1573, + serialized_end=1605, ) _STRUCTUREDQUERY_UNARYFILTER = _descriptor.Descriptor( @@ -494,8 +504,8 @@ fields=[], ) ], - serialized_start=1576, - serialized_end=1819, + serialized_start=1608, + serialized_end=1851, ) _STRUCTUREDQUERY_ORDER = _descriptor.Descriptor( @@ -550,27 +560,27 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1822, - serialized_end=1974, + serialized_start=1854, + serialized_end=2006, ) -_STRUCTUREDQUERY_FIELDREFERENCE = _descriptor.Descriptor( - 
name="FieldReference", - full_name="google.firestore.v1beta1.StructuredQuery.FieldReference", +_STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor( + name="Projection", + full_name="google.firestore.v1beta1.StructuredQuery.Projection", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name="field_path", - full_name="google.firestore.v1beta1.StructuredQuery.FieldReference.field_path", + name="fields", + full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields", index=0, number=2, - type=9, - cpp_type=9, - label=1, + type=11, + cpp_type=10, + label=3, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=[], message_type=None, enum_type=None, containing_type=None, @@ -588,27 +598,27 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1976, - serialized_end=2012, + serialized_start=2008, + serialized_end=2094, ) -_STRUCTUREDQUERY_PROJECTION = _descriptor.Descriptor( - name="Projection", - full_name="google.firestore.v1beta1.StructuredQuery.Projection", +_STRUCTUREDQUERY_FIELDREFERENCE = _descriptor.Descriptor( + name="FieldReference", + full_name="google.firestore.v1beta1.StructuredQuery.FieldReference", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name="fields", - full_name="google.firestore.v1beta1.StructuredQuery.Projection.fields", + name="field_path", + full_name="google.firestore.v1beta1.StructuredQuery.FieldReference.field_path", index=0, number=2, - type=11, - cpp_type=10, - label=3, + type=9, + cpp_type=9, + label=1, has_default_value=False, - default_value=[], + default_value=_b("").decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -626,8 +636,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2014, - serialized_end=2100, + serialized_start=2096, + serialized_end=2132, ) _STRUCTUREDQUERY = _descriptor.Descriptor( @@ -790,8 +800,8 @@ 
_STRUCTUREDQUERY_FIELDFILTER, _STRUCTUREDQUERY_UNARYFILTER, _STRUCTUREDQUERY_ORDER, - _STRUCTUREDQUERY_FIELDREFERENCE, _STRUCTUREDQUERY_PROJECTION, + _STRUCTUREDQUERY_FIELDREFERENCE, ], enum_types=[_STRUCTUREDQUERY_DIRECTION], serialized_options=None, @@ -800,7 +810,7 @@ extension_ranges=[], oneofs=[], serialized_start=194, - serialized_end=2171, + serialized_end=2203, ) @@ -856,8 +866,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2173, - serialized_end=2246, + serialized_start=2205, + serialized_end=2278, ) _STRUCTUREDQUERY_COLLECTIONSELECTOR.containing_type = _STRUCTUREDQUERY @@ -933,11 +943,11 @@ "direction" ].enum_type = _STRUCTUREDQUERY_DIRECTION _STRUCTUREDQUERY_ORDER.containing_type = _STRUCTUREDQUERY -_STRUCTUREDQUERY_FIELDREFERENCE.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY_PROJECTION.fields_by_name[ "fields" ].message_type = _STRUCTUREDQUERY_FIELDREFERENCE _STRUCTUREDQUERY_PROJECTION.containing_type = _STRUCTUREDQUERY +_STRUCTUREDQUERY_FIELDREFERENCE.containing_type = _STRUCTUREDQUERY _STRUCTUREDQUERY.fields_by_name["select"].message_type = _STRUCTUREDQUERY_PROJECTION _STRUCTUREDQUERY.fields_by_name[ "from" @@ -1084,17 +1094,6 @@ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Order) ), ), - FieldReference=_reflection.GeneratedProtocolMessageType( - "FieldReference", - (_message.Message,), - dict( - DESCRIPTOR=_STRUCTUREDQUERY_FIELDREFERENCE, - __module__="google.cloud.firestore_v1beta1.proto.query_pb2", - __doc__="""A reference to a field, such as ``max(messages.time) as max_time``. 
- """, - # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldReference) - ), - ), Projection=_reflection.GeneratedProtocolMessageType( "Projection", (_message.Message,), @@ -1112,6 +1111,17 @@ # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.Projection) ), ), + FieldReference=_reflection.GeneratedProtocolMessageType( + "FieldReference", + (_message.Message,), + dict( + DESCRIPTOR=_STRUCTUREDQUERY_FIELDREFERENCE, + __module__="google.cloud.firestore_v1beta1.proto.query_pb2", + __doc__="""A reference to a field, such as ``max(messages.time) as max_time``. + """, + # @@protoc_insertion_point(class_scope:google.firestore.v1beta1.StructuredQuery.FieldReference) + ), + ), DESCRIPTOR=_STRUCTUREDQUERY, __module__="google.cloud.firestore_v1beta1.proto.query_pb2", __doc__="""A Firestore query. @@ -1160,8 +1170,8 @@ _sym_db.RegisterMessage(StructuredQuery.FieldFilter) _sym_db.RegisterMessage(StructuredQuery.UnaryFilter) _sym_db.RegisterMessage(StructuredQuery.Order) -_sym_db.RegisterMessage(StructuredQuery.FieldReference) _sym_db.RegisterMessage(StructuredQuery.Projection) +_sym_db.RegisterMessage(StructuredQuery.FieldReference) Cursor = _reflection.GeneratedProtocolMessageType( "Cursor", diff --git a/firestore/setup.py b/firestore/setup.py index 0c736ab3c028..48dc96203fa9 100644 --- a/firestore/setup.py +++ b/firestore/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-firestore" description = "Google Cloud Firestore API client library" -version = "1.4.0" +version = "1.5.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -30,7 +30,7 @@ release_status = "Development Status :: 4 - Beta" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", "pytz", ] extras = {} diff --git a/firestore/synth.metadata b/firestore/synth.metadata index ab85f3700be6..e22035e702bf 100644 --- 
a/firestore/synth.metadata +++ b/firestore/synth.metadata @@ -1,18 +1,19 @@ { - "updateTime": "2019-09-19T12:22:24.552315Z", + "updateTime": "2019-10-10T12:25:00.305808Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.3", - "dockerImage": "googleapis/artman@sha256:66ca01f27ef7dc50fbfb7743b67028115a6a8acf43b2d82f9fc826de008adac4" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "23f6c4d8d49ef3f1aaa45768869d8616efe4a307" + "sha": "10f91fa12f70e8e0209a45fc10807ed1f77c7e4e", + "internalRef": "273826591" } }, { diff --git a/firestore/tests/system/test_system.py b/firestore/tests/system/test_system.py index f2d30c94a171..71ac07fcee74 100644 --- a/firestore/tests/system/test_system.py +++ b/firestore/tests/system/test_system.py @@ -492,11 +492,13 @@ def test_collection_add(client, cleanup): assert set(collection3.list_documents()) == {document_ref5} -def test_query_stream(client, cleanup): +@pytest.fixture +def query_docs(client): collection_id = "qs" + UNIQUE_RESOURCE_ID sub_collection = "child" + UNIQUE_RESOURCE_ID collection = client.collection(collection_id, "doc", sub_collection) + cleanup = [] stored = {} num_vals = 5 allowed_vals = six.moves.xrange(num_vals) @@ -505,38 +507,82 @@ def test_query_stream(client, cleanup): document_data = { "a": a_val, "b": b_val, + "c": [a_val, num_vals * 100], "stats": {"sum": a_val + b_val, "product": a_val * b_val}, } _, doc_ref = collection.add(document_data) # Add to clean-up. - cleanup(doc_ref.delete) + cleanup.append(doc_ref.delete) stored[doc_ref.id] = document_data - # 0. Limit to snapshots where ``a==1``. 
- query0 = collection.where("a", "==", 1) - values0 = {snapshot.id: snapshot.to_dict() for snapshot in query0.stream()} - assert len(values0) == num_vals - for key, value in six.iteritems(values0): + yield collection, stored, allowed_vals + + for operation in cleanup: + operation() + + +def test_query_stream_w_simple_field_eq_op(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.where("a", "==", 1) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == len(allowed_vals) + for key, value in six.iteritems(values): + assert stored[key] == value + assert value["a"] == 1 + + +def test_query_stream_w_simple_field_array_contains_op(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.where("c", "array_contains", 1) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == len(allowed_vals) + for key, value in six.iteritems(values): + assert stored[key] == value + assert value["a"] == 1 + + +def test_query_stream_w_simple_field_in_op(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = collection.where("a", "in", [1, num_vals + 100]) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == len(allowed_vals) + for key, value in six.iteritems(values): assert stored[key] == value assert value["a"] == 1 - # 1. Order by ``b``. 
- query1 = collection.order_by("b", direction=query0.DESCENDING) - values1 = [(snapshot.id, snapshot.to_dict()) for snapshot in query1.stream()] - assert len(values1) == len(stored) - b_vals1 = [] - for key, value in values1: + +def test_query_stream_w_simple_field_array_contains_any_op(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = collection.where("c", "array_contains_any", [1, num_vals * 200]) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == len(allowed_vals) + for key, value in six.iteritems(values): assert stored[key] == value - b_vals1.append(value["b"]) + assert value["a"] == 1 + + +def test_query_stream_w_order_by(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.order_by("b", direction=firestore.Query.DESCENDING) + values = [(snapshot.id, snapshot.to_dict()) for snapshot in query.stream()] + assert len(values) == len(stored) + b_vals = [] + for key, value in values: + assert stored[key] == value + b_vals.append(value["b"]) # Make sure the ``b``-values are in DESCENDING order. - assert sorted(b_vals1, reverse=True) == b_vals1 + assert sorted(b_vals, reverse=True) == b_vals + - # 2. Limit to snapshots where ``stats.sum > 1`` (a field path). - query2 = collection.where("stats.sum", ">", 4) - values2 = {snapshot.id: snapshot.to_dict() for snapshot in query2.stream()} - assert len(values2) == 10 +def test_query_stream_w_field_path(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.where("stats.sum", ">", 4) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == 10 ab_pairs2 = set() - for key, value in six.iteritems(values2): + for key, value in six.iteritems(values): assert stored[key] == value ab_pairs2.add((value["a"], value["b"])) @@ -550,63 +596,72 @@ def test_query_stream(client, cleanup): ) assert expected_ab_pairs == ab_pairs2 - # 3. 
Use a start and end cursor. - query3 = ( + +def test_query_stream_w_start_end_cursor(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = ( collection.order_by("a") .start_at({"a": num_vals - 2}) .end_before({"a": num_vals - 1}) ) - values3 = [(snapshot.id, snapshot.to_dict()) for snapshot in query3.stream()] - assert len(values3) == num_vals - for key, value in values3: + values = [(snapshot.id, snapshot.to_dict()) for snapshot in query.stream()] + assert len(values) == num_vals + for key, value in values: assert stored[key] == value assert value["a"] == num_vals - 2 - b_vals1.append(value["b"]) - - # 4. Send a query with no results. - query4 = collection.where("b", "==", num_vals + 100) - values4 = list(query4.stream()) - assert len(values4) == 0 - - # 5. Select a subset of fields. - query5 = collection.where("b", "<=", 1) - query5 = query5.select(["a", "stats.product"]) - values5 = {snapshot.id: snapshot.to_dict() for snapshot in query5.stream()} - assert len(values5) == num_vals * 2 # a ANY, b in (0, 1) - for key, value in six.iteritems(values5): + + +def test_query_stream_wo_results(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = collection.where("b", "==", num_vals + 100) + values = list(query.stream()) + assert len(values) == 0 + + +def test_query_stream_w_projection(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) + query = collection.where("b", "<=", 1).select(["a", "stats.product"]) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} + assert len(values) == num_vals * 2 # a ANY, b in (0, 1) + for key, value in six.iteritems(values): expected = { "a": stored[key]["a"], "stats": {"product": stored[key]["stats"]["product"]}, } assert expected == value - # 6. Add multiple filters via ``where()``. 
- query6 = collection.where("stats.product", ">", 5) - query6 = query6.where("stats.product", "<", 10) - values6 = {snapshot.id: snapshot.to_dict() for snapshot in query6.stream()} +def test_query_stream_w_multiple_filters(query_docs): + collection, stored, allowed_vals = query_docs + query = collection.where("stats.product", ">", 5).where("stats.product", "<", 10) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} matching_pairs = [ (a_val, b_val) for a_val in allowed_vals for b_val in allowed_vals if 5 < a_val * b_val < 10 ] - assert len(values6) == len(matching_pairs) - for key, value in six.iteritems(values6): + assert len(values) == len(matching_pairs) + for key, value in six.iteritems(values): assert stored[key] == value pair = (value["a"], value["b"]) assert pair in matching_pairs - # 7. Skip the first three results, when ``b==2`` - query7 = collection.where("b", "==", 2) + +def test_query_stream_w_offset(query_docs): + collection, stored, allowed_vals = query_docs + num_vals = len(allowed_vals) offset = 3 - query7 = query7.offset(offset) - values7 = {snapshot.id: snapshot.to_dict() for snapshot in query7.stream()} + query = collection.where("b", "==", 2).offset(offset) + values = {snapshot.id: snapshot.to_dict() for snapshot in query.stream()} # NOTE: We don't check the ``a``-values, since that would require # an ``order_by('a')``, which combined with the ``b == 2`` # filter would necessitate an index. 
- assert len(values7) == num_vals - offset - for key, value in six.iteritems(values7): + assert len(values) == num_vals - offset + for key, value in six.iteritems(values): assert stored[key] == value assert value["b"] == 2 diff --git a/firestore/tests/unit/v1/test_query.py b/firestore/tests/unit/v1/test_query.py index a4911fecb44f..bdb0e922d00b 100644 --- a/firestore/tests/unit/v1/test_query.py +++ b/firestore/tests/unit/v1/test_query.py @@ -1464,18 +1464,47 @@ def _call_fut(op_string): return _enum_from_op_string(op_string) - def test_success(self): + @staticmethod + def _get_op_class(): from google.cloud.firestore_v1.gapic import enums - op_class = enums.StructuredQuery.FieldFilter.Operator + return enums.StructuredQuery.FieldFilter.Operator + + def test_lt(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut("<"), op_class.LESS_THAN) + + def test_le(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut("<="), op_class.LESS_THAN_OR_EQUAL) + + def test_eq(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut("=="), op_class.EQUAL) + + def test_ge(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut(">="), op_class.GREATER_THAN_OR_EQUAL) + + def test_gt(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut(">"), op_class.GREATER_THAN) + + def test_array_contains(self): + op_class = self._get_op_class() self.assertEqual(self._call_fut("array_contains"), op_class.ARRAY_CONTAINS) - def test_failure(self): + def test_in(self): + op_class = self._get_op_class() + self.assertEqual(self._call_fut("in"), op_class.IN) + + def test_array_contains_any(self): + op_class = self._get_op_class() + self.assertEqual( + self._call_fut("array_contains_any"), op_class.ARRAY_CONTAINS_ANY + ) + + def test_invalid(self): with self.assertRaises(ValueError): self._call_fut("?") diff --git a/grafeas/CHANGELOG.md b/grafeas/CHANGELOG.md index c9f55c42c36d..b66a945bfffe 100644 --- 
a/grafeas/CHANGELOG.md +++ b/grafeas/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/grafeas/#history +## 0.3.0 + +10-10-2019 11:28 PDT + + +### Implementation Changes +- Remove send / receive message size limit (via synth). ([#8981](https://github.com/googleapis/google-cloud-python/pull/8981)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + ## 0.2.0 07-12-2019 17:04 PDT diff --git a/grafeas/grafeas/grafeas.py b/grafeas/grafeas/grafeas.py index 768aa8c77f29..35dae0565d59 100644 --- a/grafeas/grafeas/grafeas.py +++ b/grafeas/grafeas/grafeas.py @@ -22,4 +22,8 @@ from grafeas.grafeas_v1 import types -__all__ = ("enums", "types", "GrafeasClient") +__all__ = ( + "enums", + "types", + "GrafeasClient", +) diff --git a/grafeas/grafeas/grafeas_v1/__init__.py b/grafeas/grafeas/grafeas_v1/__init__.py index 9bbb0db16767..24d3a43d8000 100644 --- a/grafeas/grafeas/grafeas_v1/__init__.py +++ b/grafeas/grafeas/grafeas_v1/__init__.py @@ -27,4 +27,8 @@ class GrafeasClient(grafeas_client.GrafeasClient): enums = enums -__all__ = ("enums", "types", "GrafeasClient") +__all__ = ( + "enums", + "types", + "GrafeasClient", +) diff --git a/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py b/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py index a7e3c6d31713..544632304ea7 100644 --- a/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py +++ 
b/grafeas/grafeas/grafeas_v1/gapic/grafeas_client.py @@ -39,7 +39,7 @@ from grafeas.grafeas_v1.proto import grafeas_pb2_grpc -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("grafeas").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("grafeas",).version class GrafeasClient(object): @@ -69,7 +69,7 @@ class GrafeasClient(object): def note_path(cls, project, note): """Return a fully-qualified note string.""" return google.api_core.path_template.expand( - "projects/{project}/notes/{note}", project=project, note=note + "projects/{project}/notes/{note}", project=project, note=note, ) @classmethod @@ -85,7 +85,7 @@ def occurrence_path(cls, project, occurrence): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__(self, transport, client_config=None, client_info=None): @@ -125,7 +125,7 @@ def __init__(self, transport, client_config=None, client_info=None): if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -136,7 +136,7 @@ def __init__(self, transport, client_config=None, client_info=None): # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -202,7 +202,7 @@ def get_occurrence( client_info=self._client_info, ) - request = grafeas_pb2.GetOccurrenceRequest(name=name) + request = grafeas_pb2.GetOccurrenceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -300,7 +300,7 @@ def list_occurrences( ) request = grafeas_pb2.ListOccurrencesRequest( - parent=parent, filter=filter_, page_size=page_size + parent=parent, filter=filter_, page_size=page_size, ) if metadata is None: metadata = [] @@ -385,7 +385,7 @@ def delete_occurrence( client_info=self._client_info, ) - request = grafeas_pb2.DeleteOccurrenceRequest(name=name) + request = grafeas_pb2.DeleteOccurrenceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -468,7 +468,7 @@ def create_occurrence( ) request = grafeas_pb2.CreateOccurrenceRequest( - parent=parent, occurrence=occurrence + parent=parent, occurrence=occurrence, ) if metadata is None: metadata = [] @@ -552,7 +552,7 @@ def batch_create_occurrences( ) request = grafeas_pb2.BatchCreateOccurrencesRequest( - parent=parent, occurrences=occurrences + parent=parent, occurrences=occurrences, ) if metadata is None: metadata = [] @@ -641,7 +641,7 @@ def update_occurrence( ) request = grafeas_pb2.UpdateOccurrenceRequest( - name=name, occurrence=occurrence, update_mask=update_mask + name=name, occurrence=occurrence, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -717,7 +717,7 @@ def get_occurrence_note( client_info=self._client_info, ) - request = grafeas_pb2.GetOccurrenceNoteRequest(name=name) + request = grafeas_pb2.GetOccurrenceNoteRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -791,7 +791,7 @@ def get_note( client_info=self._client_info, ) - request = grafeas_pb2.GetNoteRequest(name=name) + request = grafeas_pb2.GetNoteRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -889,7 +889,7 @@ def list_notes( ) request = grafeas_pb2.ListNotesRequest( - 
parent=parent, filter=filter_, page_size=page_size + parent=parent, filter=filter_, page_size=page_size, ) if metadata is None: metadata = [] @@ -972,7 +972,7 @@ def delete_note( client_info=self._client_info, ) - request = grafeas_pb2.DeleteNoteRequest(name=name) + request = grafeas_pb2.DeleteNoteRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1060,7 +1060,7 @@ def create_note( ) request = grafeas_pb2.CreateNoteRequest( - parent=parent, note_id=note_id, note=note + parent=parent, note_id=note_id, note=note, ) if metadata is None: metadata = [] @@ -1143,7 +1143,7 @@ def batch_create_notes( client_info=self._client_info, ) - request = grafeas_pb2.BatchCreateNotesRequest(parent=parent, notes=notes) + request = grafeas_pb2.BatchCreateNotesRequest(parent=parent, notes=notes,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1231,7 +1231,7 @@ def update_note( ) request = grafeas_pb2.UpdateNoteRequest( - name=name, note=note, update_mask=update_mask + name=name, note=note, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1332,7 +1332,7 @@ def list_note_occurrences( ) request = grafeas_pb2.ListNoteOccurrencesRequest( - name=name, filter=filter_, page_size=page_size + name=name, filter=filter_, page_size=page_size, ) if metadata is None: metadata = [] diff --git a/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py b/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py index 2e438b464d3d..b7769a71eced 100644 --- a/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py +++ b/grafeas/grafeas/grafeas_v1/gapic/transports/grafeas_grpc_transport.py @@ -49,7 +49,7 @@ def __init__(self, address, scopes, channel=None, credentials=None): # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." 
+ "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -68,7 +68,9 @@ def __init__(self, address, scopes, channel=None, credentials=None): # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. - self._stubs = {"grafeas_stub": grafeas_pb2_grpc.GrafeasStub(channel)} + self._stubs = { + "grafeas_stub": grafeas_pb2_grpc.GrafeasStub(channel), + } @classmethod def create_channel(cls, address, scopes, credentials=None, **kwargs): diff --git a/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py b/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py index 356e70bd170a..41f8ca1e2b13 100644 --- a/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/attestation_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n"grafeas_v1/proto/attestation.proto\x12\ngrafeas.v1\x1a\x1dgrafeas_v1/proto/common.proto"f\n\x0f\x41ttestationNote\x12.\n\x04hint\x18\x01 \x01(\x0b\x32 .grafeas.v1.AttestationNote.Hint\x1a#\n\x04Hint\x12\x1b\n\x13human_readable_name\x18\x01 \x01(\t"^\n\x15\x41ttestationOccurrence\x12\x1a\n\x12serialized_payload\x18\x01 \x01(\x0c\x12)\n\nsignatures\x18\x02 \x03(\x0b\x32\x15.grafeas.v1.SignatureBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' ), - dependencies=[grafeas__v1_dot_proto_dot_common__pb2.DESCRIPTOR], + dependencies=[grafeas__v1_dot_proto_dot_common__pb2.DESCRIPTOR,], ) @@ -56,7 +56,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -94,10 +94,10 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_ATTESTATIONNOTE_HINT], + nested_types=[_ATTESTATIONNOTE_HINT,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/grafeas/grafeas/grafeas_v1/proto/build_pb2.py b/grafeas/grafeas/grafeas_v1/proto/build_pb2.py index 
09be6011a42a..affcce26b1f3 100644 --- a/grafeas/grafeas/grafeas_v1/proto/build_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/build_pb2.py @@ -30,7 +30,7 @@ serialized_pb=_b( '\n\x1cgrafeas_v1/proto/build.proto\x12\ngrafeas.v1\x1a!grafeas_v1/proto/provenance.proto"$\n\tBuildNote\x12\x17\n\x0f\x62uilder_version\x18\x01 \x01(\t"\\\n\x0f\x42uildOccurrence\x12/\n\nprovenance\x18\x01 \x01(\x0b\x32\x1b.grafeas.v1.BuildProvenance\x12\x18\n\x10provenance_bytes\x18\x02 \x01(\tBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' ), - dependencies=[grafeas__v1_dot_proto_dot_provenance__pb2.DESCRIPTOR], + dependencies=[grafeas__v1_dot_proto_dot_provenance__pb2.DESCRIPTOR,], ) @@ -58,7 +58,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py b/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py index 4f410ee35d56..b5fea556d0ac 100644 --- a/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/deployment_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n!grafeas_v1/proto/deployment.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto"&\n\x0e\x44\x65ploymentNote\x12\x14\n\x0cresource_uri\x18\x01 \x03(\t"\xc7\x02\n\x14\x44\x65ploymentOccurrence\x12\x12\n\nuser_email\x18\x01 \x01(\t\x12/\n\x0b\x64\x65ploy_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rundeploy_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0e\n\x06\x63onfig\x18\x04 \x01(\t\x12\x0f\n\x07\x61\x64\x64ress\x18\x05 \x01(\t\x12\x14\n\x0cresource_uri\x18\x06 \x03(\t\x12;\n\x08platform\x18\x07 
\x01(\x0e\x32).grafeas.v1.DeploymentOccurrence.Platform"C\n\x08Platform\x12\x18\n\x14PLATFORM_UNSPECIFIED\x10\x00\x12\x07\n\x03GKE\x10\x01\x12\x08\n\x04\x46LEX\x10\x02\x12\n\n\x06\x43USTOM\x10\x03\x42Q\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,], ) @@ -87,7 +87,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -238,7 +238,7 @@ ], extensions=[], nested_types=[], - enum_types=[_DEPLOYMENTOCCURRENCE_PLATFORM], + enum_types=[_DEPLOYMENTOCCURRENCE_PLATFORM,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py b/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py index e8776e44b0a7..216f8db6ba7e 100644 --- a/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/discovery_pb2.py @@ -136,7 +136,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py b/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py index 89af50706aa9..babb2045386f 100644 --- a/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/grafeas_pb2.py @@ -344,7 +344,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=474, serialized_end=1079, @@ -661,7 +661,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1082, serialized_end=1753, @@ -692,7 +692,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -881,7 +881,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1052,7 +1052,7 @@ extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1091,7 +1091,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1280,7 +1280,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1696,7 +1696,7 @@ ), ], extensions=[], - nested_types=[_BATCHCREATENOTESREQUEST_NOTESENTRY], + nested_types=[_BATCHCREATENOTESREQUEST_NOTESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1732,7 +1732,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1828,7 +1828,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/grafeas/grafeas/grafeas_v1/proto/package_pb2.py b/grafeas/grafeas/grafeas_v1/proto/package_pb2.py index dbcb619010c9..519f2aa60187 100644 --- a/grafeas/grafeas/grafeas_v1/proto/package_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/package_pb2.py @@ -510,7 +510,7 @@ ], extensions=[], nested_types=[], - enum_types=[_VERSION_VERSIONKIND], + enum_types=[_VERSION_VERSIONKIND,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py b/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py index 412c42c8db2f..3f25bbc15af8 100644 --- a/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/provenance_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n!grafeas_v1/proto/provenance.proto\x12\ngrafeas.v1\x1a\x1fgoogle/protobuf/timestamp.proto"\x90\x04\n\x0f\x42uildProvenance\x12\n\n\x02id\x18\x01 \x01(\t\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12%\n\x08\x63ommands\x18\x03 \x03(\x0b\x32\x13.grafeas.v1.Command\x12-\n\x0f\x62uilt_artifacts\x18\x04 \x03(\x0b\x32\x14.grafeas.v1.Artifact\x12/\n\x0b\x63reate_time\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nstart_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x63reator\x18\x08 \x01(\t\x12\x10\n\x08logs_uri\x18\t \x01(\t\x12-\n\x11source_provenance\x18\n \x01(\x0b\x32\x12.grafeas.v1.Source\x12\x12\n\ntrigger_id\x18\x0b \x01(\t\x12\x44\n\rbuild_options\x18\x0c \x03(\x0b\x32-.grafeas.v1.BuildProvenance.BuildOptionsEntry\x12\x17\n\x0f\x62uilder_version\x18\r \x01(\t\x1a\x33\n\x11\x42uildOptionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x95\x02\n\x06Source\x12#\n\x1b\x61rtifact_storage_source_uri\x18\x01 \x01(\t\x12\x37\n\x0b\x66ile_hashes\x18\x02 \x03(\x0b\x32".grafeas.v1.Source.FileHashesEntry\x12*\n\x07\x63ontext\x18\x03 \x01(\x0b\x32\x19.grafeas.v1.SourceContext\x12\x36\n\x13\x61\x64\x64itional_contexts\x18\x04 \x03(\x0b\x32\x19.grafeas.v1.SourceContext\x1aI\n\x0f\x46ileHashesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.grafeas.v1.FileHashes:\x02\x38\x01"1\n\nFileHashes\x12#\n\tfile_hash\x18\x01 \x03(\x0b\x32\x10.grafeas.v1.Hash"#\n\x04Hash\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c"]\n\x07\x43ommand\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0b\n\x03\x65nv\x18\x02 \x03(\t\x12\x0c\n\x04\x61rgs\x18\x03 \x03(\t\x12\x0b\n\x03\x64ir\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\x12\x10\n\x08wait_for\x18\x06 \x03(\t"7\n\x08\x41rtifact\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\t\x12\r\n\x05names\x18\x03 \x03(\t"\x9a\x02\n\rSourceContext\x12\x38\n\ncloud_repo\x18\x01 \x01(\x0b\x32".grafeas.v1.CloudRepoSourceContextH\x00\x12\x31\n\x06gerrit\x18\x02 \x01(\x0b\x32\x1f.grafeas.v1.GerritSourceContextH\x00\x12+\n\x03git\x18\x03 \x01(\x0b\x32\x1c.grafeas.v1.GitSourceContextH\x00\x12\x35\n\x06labels\x18\x04 \x03(\x0b\x32%.grafeas.v1.SourceContext.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07\x63ontext"\x8a\x01\n\x0c\x41liasContext\x12+\n\x04kind\x18\x01 \x01(\x0e\x32\x1d.grafeas.v1.AliasContext.Kind\x12\x0c\n\x04name\x18\x02 \x01(\t"?\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x46IXED\x10\x01\x12\x0b\n\x07MOVABLE\x10\x02\x12\t\n\x05OTHER\x10\x04"\x93\x01\n\x16\x43loudRepoSourceContext\x12#\n\x07repo_id\x18\x01 \x01(\x0b\x32\x12.grafeas.v1.RepoId\x12\x15\n\x0brevision_id\x18\x02 \x01(\tH\x00\x12\x31\n\ralias_context\x18\x03 \x01(\x0b\x32\x18.grafeas.v1.AliasContextH\x00\x42\n\n\x08revision"\x95\x01\n\x13GerritSourceContext\x12\x10\n\x08host_uri\x18\x01 \x01(\t\x12\x16\n\x0egerrit_project\x18\x02 \x01(\t\x12\x15\n\x0brevision_id\x18\x03 \x01(\tH\x00\x12\x31\n\ralias_context\x18\x04 \x01(\x0b\x32\x18.grafeas.v1.AliasContextH\x00\x42\n\n\x08revision"4\n\x10GitSourceContext\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x13\n\x0brevision_id\x18\x02 \x01(\t"S\n\x06RepoId\x12\x34\n\x0fproject_repo_id\x18\x01 \x01(\x0b\x32\x19.grafeas.v1.ProjectRepoIdH\x00\x12\r\n\x03uid\x18\x02 \x01(\tH\x00\x42\x04\n\x02id"6\n\rProjectRepoId\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x11\n\trepo_name\x18\x02 \x01(\tBQ\n\rio.grafeas.v1P\x01Z8google.golang.org/genproto/googleapis/grafeas/v1;grafeas\xa2\x02\x03GRAb\x06proto3' ), - dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR], + dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,], ) @@ -362,7 +362,7 @@ ), ], extensions=[], - nested_types=[_BUILDPROVENANCE_BUILDOPTIONSENTRY], + nested_types=[_BUILDPROVENANCE_BUILDOPTIONSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -511,7 +511,7 @@ ), ], extensions=[], - nested_types=[_SOURCE_FILEHASHESENTRY], + nested_types=[_SOURCE_FILEHASHESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -547,7 +547,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -960,7 +960,7 @@ ), ], 
extensions=[], - nested_types=[_SOURCECONTEXT_LABELSENTRY], + nested_types=[_SOURCECONTEXT_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -973,7 +973,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1134, serialized_end=1416, @@ -1026,7 +1026,7 @@ ], extensions=[], nested_types=[], - enum_types=[_ALIASCONTEXT_KIND], + enum_types=[_ALIASCONTEXT_KIND,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1113,7 +1113,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1560, serialized_end=1707, @@ -1214,7 +1214,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1710, serialized_end=1859, @@ -1336,7 +1336,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1915, serialized_end=1998, diff --git a/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py b/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py index 612904b4a998..ad905b2af64e 100644 --- a/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py +++ b/grafeas/grafeas/grafeas_v1/proto/vulnerability_pb2.py @@ -440,7 +440,7 @@ ), ], extensions=[], - nested_types=[_VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE], + nested_types=[_VULNERABILITYNOTE_WINDOWSDETAIL_KNOWLEDGEBASE,], enum_types=[], serialized_options=None, is_extendable=False, @@ -550,7 +550,7 @@ ), ], extensions=[], - nested_types=[_VULNERABILITYNOTE_DETAIL, _VULNERABILITYNOTE_WINDOWSDETAIL], + nested_types=[_VULNERABILITYNOTE_DETAIL, _VULNERABILITYNOTE_WINDOWSDETAIL,], enum_types=[], serialized_options=None, is_extendable=False, @@ -879,7 +879,7 @@ ), ], extensions=[], - nested_types=[_VULNERABILITYOCCURRENCE_PACKAGEISSUE], + nested_types=[_VULNERABILITYOCCURRENCE_PACKAGEISSUE,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/grafeas/grafeas/grafeas_v1/types.py b/grafeas/grafeas/grafeas_v1/types.py index 253b6cadc65f..c6bbfcd9f902 100644 --- a/grafeas/grafeas/grafeas_v1/types.py +++ 
b/grafeas/grafeas/grafeas_v1/types.py @@ -38,7 +38,13 @@ from grafeas.grafeas_v1.proto import vulnerability_pb2 -_shared_modules = [any_pb2, empty_pb2, field_mask_pb2, timestamp_pb2, status_pb2] +_shared_modules = [ + any_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +] _local_modules = [ attestation_pb2, diff --git a/grafeas/setup.py b/grafeas/setup.py index 62fbae2dc527..4c351d7a7101 100644 --- a/grafeas/setup.py +++ b/grafeas/setup.py @@ -21,7 +21,7 @@ name = "grafeas" description = "Grafeas API client library" -version = "0.2.0" +version = "0.3.0" release_status = "Development Status :: 3 - Alpha" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", diff --git a/grafeas/synth.metadata b/grafeas/synth.metadata index 0c0938c03e84..29ee97018821 100644 --- a/grafeas/synth.metadata +++ b/grafeas/synth.metadata @@ -1,25 +1,26 @@ { - "updateTime": "2019-08-06T18:36:30.465284Z", + "updateTime": "2019-10-29T12:26:28.238846Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "53e641721f965a485af64331cfea9e5522294d78" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/iam/docs/conf.py b/iam/docs/conf.py index e31d228822a8..0c61d6ee8eba 100644 --- a/iam/docs/conf.py +++ b/iam/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - 
"requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/iam/synth.metadata b/iam/synth.metadata index d7a2c8d24679..d8daa260fb6a 100644 --- a/iam/synth.metadata +++ b/iam/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-03T12:12:16.255697Z", + "updateTime": "2019-10-05T12:25:22.123031Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7b212a8d2319cd81a7b6942c25dbf4550480a06c", - "internalRef": "261339454" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/iot/.repo-metadata.json b/iot/.repo-metadata.json index 0fd78d6b99d4..fb7140f7f4d2 100644 --- a/iot/.repo-metadata.json +++ b/iot/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "cloudiot", "name_pretty": "Google Cloud Internet of Things (IoT) Core", "product_documentation": "https://cloud.google.com/iot", - "client_documentation": "https://googleapis.dev/python/iot/latest", + "client_documentation": "https://googleapis.dev/python/cloudiot/latest", "issue_tracker": "https://issuetracker.google.com/issues?q=status:open%20componentid:310170", "release_level": "alpha", "language": "python", diff --git a/iot/docs/conf.py b/iot/docs/conf.py index 102500ef6b6c..44d871cfa605 100644 --- a/iot/docs/conf.py +++ b/iot/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": 
("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/iot/synth.metadata b/iot/synth.metadata index 6306a7aaffc6..f93b32daed5a 100644 --- a/iot/synth.metadata +++ b/iot/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:28:01.870632Z", + "updateTime": "2019-10-05T12:26:06.841344Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/irm/docs/conf.py b/irm/docs/conf.py index fa8110a1a7cd..f479af751a51 100644 --- a/irm/docs/conf.py +++ b/irm/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/irm/synth.metadata b/irm/synth.metadata index 4650d21844b1..d6539a1bb446 100644 --- a/irm/synth.metadata +++ b/irm/synth.metadata @@ 
-1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:28:51.028503Z", + "updateTime": "2019-10-05T12:26:52.725480Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/language/google/cloud/language_v1beta2/gapic/enums.py b/language/google/cloud/language_v1beta2/gapic/enums.py index aa68fa4911ec..0d3cf1c591ed 100644 --- a/language/google/cloud/language_v1beta2/gapic/enums.py +++ b/language/google/cloud/language_v1beta2/gapic/enums.py @@ -34,7 +34,7 @@ class EncodingType(enum.IntEnum): based on the UTF-8 encoding of the input. C++ and Go are examples of languages that use this encoding natively. UTF16 (int): Encoding-dependent information (such as ``begin_offset``) is calculated - based on the UTF-16 encoding of the input. Java and Javascript are + based on the UTF-16 encoding of the input. Java and JavaScript are examples of languages that use this encoding natively. UTF32 (int): Encoding-dependent information (such as ``begin_offset``) is calculated based on the UTF-32 encoding of the input. Python is an example of a @@ -242,7 +242,10 @@ class Type(enum.IntEnum): class Entity(object): class Type(enum.IntEnum): """ - The type of the entity. + The type of the entity. For most entity types, the associated metadata + is a Wikipedia URL (``wikipedia_url``) and Knowledge Graph MID + (``mid``). The table below lists the associated fields for entities that + have different metadata. 
Attributes: UNKNOWN (int): Unknown @@ -250,9 +253,49 @@ class Type(enum.IntEnum): LOCATION (int): Location ORGANIZATION (int): Organization EVENT (int): Event - WORK_OF_ART (int): Work of art - CONSUMER_GOOD (int): Consumer goods - OTHER (int): Other types + WORK_OF_ART (int): Artwork + CONSUMER_GOOD (int): Consumer product + OTHER (int): Other types of entities + PHONE_NUMBER (int): Phone number + + The metadata lists the phone number, formatted according to local + convention, plus whichever additional elements appear in the text: + + - ``number`` - the actual number, broken down into sections as per + local convention + - ``national_prefix`` - country code, if detected + - ``area_code`` - region or area code, if detected + - ``extension`` - phone extension (to be dialed after connection), if + detected + ADDRESS (int): Address + + The metadata identifies the street number and locality plus whichever + additional elements appear in the text: + + - ``street_number`` - street number + - ``locality`` - city or town + - ``street_name`` - street/route name, if detected + - ``postal_code`` - postal code, if detected + - ``country`` - country, if detected< + - ``broad_region`` - administrative area, such as the state, if + detected + - ``narrow_region`` - smaller administrative area, such as county, if + detected + - ``sublocality`` - used in Asian addresses to demark a district within + a city, if detected + DATE (int): Date + + The metadata identifies the components of the date: + + - ``year`` - four digit year, if detected + - ``month`` - two digit month number, if detected + - ``day`` - two digit day number, if detected + NUMBER (int): Number + + The metadata is the number itself. + PRICE (int): Price + + The metadata identifies the ``value`` and ``currency``. 
""" UNKNOWN = 0 @@ -263,6 +306,11 @@ class Type(enum.IntEnum): WORK_OF_ART = 5 CONSUMER_GOOD = 6 OTHER = 7 + PHONE_NUMBER = 9 + ADDRESS = 10 + DATE = 11 + NUMBER = 12 + PRICE = 13 class EntityMention(object): diff --git a/language/google/cloud/language_v1beta2/gapic/language_service_client.py b/language/google/cloud/language_v1beta2/gapic/language_service_client.py index dcb8e89d7fc6..73af0ff65ede 100644 --- a/language/google/cloud/language_v1beta2/gapic/language_service_client.py +++ b/language/google/cloud/language_v1beta2/gapic/language_service_client.py @@ -207,7 +207,7 @@ def analyze_sentiment( >>> response = client.analyze_sentiment(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -274,7 +274,7 @@ def analyze_entities( >>> response = client.analyze_entities(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -339,7 +339,7 @@ def analyze_entity_sentiment( >>> response = client.analyze_entity_sentiment(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -405,7 +405,7 @@ def analyze_syntax( >>> response = client.analyze_syntax(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -468,7 +468,7 @@ def classify_text( >>> response = client.classify_text(document) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` @@ -534,11 +534,11 @@ def annotate_text( >>> response = client.annotate_text(document, features) Args: - document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Input document. + document (Union[dict, ~google.cloud.language_v1beta2.types.Document]): Required. Input document. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Document` - features (Union[dict, ~google.cloud.language_v1beta2.types.Features]): The enabled features. + features (Union[dict, ~google.cloud.language_v1beta2.types.Features]): Required. The enabled features. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.language_v1beta2.types.Features` diff --git a/language/google/cloud/language_v1beta2/proto/language_service.proto b/language/google/cloud/language_v1beta2/proto/language_service.proto index 0263be04aedd..d0242e599759 100644 --- a/language/google/cloud/language_v1beta2/proto/language_service.proto +++ b/language/google/cloud/language_v1beta2/proto/language_service.proto @@ -1,4 +1,4 @@ -// Copyright 2017 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,15 +11,16 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.language.v1beta2; import "google/api/annotations.proto"; -import "google/longrunning/operations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/language/v1beta2;language"; option java_multiple_files = true; @@ -29,36 +30,42 @@ option java_package = "com.google.cloud.language.v1beta2"; // Provides text analysis operations such as sentiment analysis and entity // recognition. service LanguageService { + option (google.api.default_host) = "language.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-language," + "https://www.googleapis.com/auth/cloud-platform"; + // Analyzes the sentiment of the provided text. 
- rpc AnalyzeSentiment(AnalyzeSentimentRequest) - returns (AnalyzeSentimentResponse) { + rpc AnalyzeSentiment(AnalyzeSentimentRequest) returns (AnalyzeSentimentResponse) { option (google.api.http) = { post: "/v1beta2/documents:analyzeSentiment" body: "*" }; + option (google.api.method_signature) = "document,encoding_type"; + option (google.api.method_signature) = "document"; } // Finds named entities (currently proper names and common nouns) in the text // along with entity types, salience, mentions for each entity, and // other properties. - rpc AnalyzeEntities(AnalyzeEntitiesRequest) - returns (AnalyzeEntitiesResponse) { + rpc AnalyzeEntities(AnalyzeEntitiesRequest) returns (AnalyzeEntitiesResponse) { option (google.api.http) = { post: "/v1beta2/documents:analyzeEntities" body: "*" }; + option (google.api.method_signature) = "document,encoding_type"; + option (google.api.method_signature) = "document"; } - // Finds entities, similar to - // [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - // in the text and analyzes sentiment associated with each entity and its - // mentions. - rpc AnalyzeEntitySentiment(AnalyzeEntitySentimentRequest) - returns (AnalyzeEntitySentimentResponse) { + // Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + // sentiment associated with each entity and its mentions. 
+ rpc AnalyzeEntitySentiment(AnalyzeEntitySentimentRequest) returns (AnalyzeEntitySentimentResponse) { option (google.api.http) = { post: "/v1beta2/documents:analyzeEntitySentiment" body: "*" }; + option (google.api.method_signature) = "document,encoding_type"; + option (google.api.method_signature) = "document"; } // Analyzes the syntax of the text and provides sentence boundaries and @@ -69,6 +76,8 @@ service LanguageService { post: "/v1beta2/documents:analyzeSyntax" body: "*" }; + option (google.api.method_signature) = "document,encoding_type"; + option (google.api.method_signature) = "document"; } // Classifies a document into categories. @@ -77,6 +86,7 @@ service LanguageService { post: "/v1beta2/documents:classifyText" body: "*" }; + option (google.api.method_signature) = "document"; } // A convenience method that provides all syntax, sentiment, entity, and @@ -86,6 +96,8 @@ service LanguageService { post: "/v1beta2/documents:annotateText" body: "*" }; + option (google.api.method_signature) = "document,features,encoding_type"; + option (google.api.method_signature) = "document,features"; } } @@ -113,6 +125,7 @@ message Document { // Google Cloud Storage URI. oneof source { // The content of the input in string format. + // Cloud audit logging exempt since it is based on user data. string content = 2; // The Google Cloud Storage URI where the file content is located. @@ -139,8 +152,8 @@ message Sentence { TextSpan text = 1; // For calls to [AnalyzeSentiment][] or if - // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] - // is set to true, this field will contain the sentiment for the sentence. + // [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to + // true, this field will contain the sentiment for the sentence. 
Sentiment sentiment = 2; } @@ -148,7 +161,10 @@ message Sentence { // a person, an organization, or location. The API associates information, such // as salience and mentions, with entities. message Entity { - // The type of the entity. + // The type of the entity. For most entity types, the associated metadata is a + // Wikipedia URL (`wikipedia_url`) and Knowledge Graph MID (`mid`). The table + // below lists the associated fields for entities that have different + // metadata. enum Type { // Unknown UNKNOWN = 0; @@ -165,14 +181,63 @@ message Entity { // Event EVENT = 4; - // Work of art + // Artwork WORK_OF_ART = 5; - // Consumer goods + // Consumer product CONSUMER_GOOD = 6; - // Other types + // Other types of entities OTHER = 7; + + // Phone number + // + // The metadata lists the phone number, formatted according to local + // convention, plus whichever additional elements appear in the text: + // + // * `number` - the actual number, broken down into sections as per local + // convention + // * `national_prefix` - country code, if detected + // * `area_code` - region or area code, if detected + // * `extension` - phone extension (to be dialed after connection), if + // detected + PHONE_NUMBER = 9; + + // Address + // + // The metadata identifies the street number and locality plus whichever + // additional elements appear in the text: + // + // * `street_number` - street number + // * `locality` - city or town + // * `street_name` - street/route name, if detected + // * `postal_code` - postal code, if detected + // * `country` - country, if detected< + // * `broad_region` - administrative area, such as the state, if detected + // * `narrow_region` - smaller administrative area, such as county, if + // detected + // * `sublocality` - used in Asian addresses to demark a district within a + // city, if detected + ADDRESS = 10; + + // Date + // + // The metadata identifies the components of the date: + // + // * `year` - four digit year, if detected + // * 
`month` - two digit month number, if detected + // * `day` - two digit day number, if detected + DATE = 11; + + // Number + // + // The metadata is the number itself. + NUMBER = 12; + + // Price + // + // The metadata identifies the `value` and `currency`. + PRICE = 13; } // The representative name for the entity. @@ -183,8 +248,9 @@ message Entity { // Metadata associated with the entity. // - // Currently, Wikipedia URLs and Knowledge Graph MIDs are provided, if - // available. The associated keys are "wikipedia_url" and "mid", respectively. + // For most entity types, the metadata is a Wikipedia URL (`wikipedia_url`) + // and Knowledge Graph MID (`mid`), if they are available. For the metadata + // associated with other entity types, see the Type table below. map metadata = 3; // The salience score associated with the entity in the [0, 1.0] range. @@ -200,12 +266,38 @@ message Entity { repeated EntityMention mentions = 5; // For calls to [AnalyzeEntitySentiment][] or if - // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - // is set to true, this field will contain the aggregate sentiment expressed - // for this entity in the provided document. + // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] is set to + // true, this field will contain the aggregate sentiment expressed for this + // entity in the provided document. Sentiment sentiment = 6; } +// Represents the text encoding that the caller uses to process the output. +// Providing an `EncodingType` is recommended because the API provides the +// beginning offsets for various outputs, such as tokens and mentions, and +// languages that natively use different text encodings may access offsets +// differently. 
+enum EncodingType { + // If `EncodingType` is not specified, encoding-dependent information (such as + // `begin_offset`) will be set at `-1`. + NONE = 0; + + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-8 encoding of the input. C++ and Go are examples of languages + // that use this encoding natively. + UTF8 = 1; + + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-16 encoding of the input. Java and JavaScript are examples of + // languages that use this encoding natively. + UTF16 = 2; + + // Encoding-dependent information (such as `begin_offset`) is calculated based + // on the UTF-32 encoding of the input. Python is an example of a language + // that uses this encoding natively. + UTF32 = 3; +} + // Represents the smallest syntactic building block of the text. message Token { // The token text. @@ -223,6 +315,7 @@ message Token { // Represents the feeling associated with the entire text or entities in // the text. +// Next ID: 6 message Sentiment { // A non-negative number in the [0, +inf) range, which represents // the absolute magnitude of sentiment regardless of score (positive or @@ -849,9 +942,9 @@ message EntityMention { Type type = 2; // For calls to [AnalyzeEntitySentiment][] or if - // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] - // is set to true, this field will contain the sentiment expressed for this - // mention of the entity in the provided document. + // [AnnotateTextRequest.Features.extract_entity_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_entity_sentiment] is set to + // true, this field will contain the sentiment expressed for this mention of + // the entity in the provided document. 
Sentiment sentiment = 3; } @@ -861,15 +954,14 @@ message TextSpan { string content = 1; // The API calculates the beginning offset of the content in the original - // document according to the - // [EncodingType][google.cloud.language.v1beta2.EncodingType] specified in the - // API request. + // document according to the [EncodingType][google.cloud.language.v1beta2.EncodingType] specified in the API request. int32 begin_offset = 2; } // Represents a category returned from the text classifier. message ClassificationCategory { - // The name of the category representing the document. + // The name of the category representing the document, from the [predefined + // taxonomy](/natural-language/docs/categories). string name = 1; // The classifier's confidence of the category. Number represents how certain @@ -879,8 +971,8 @@ message ClassificationCategory { // The sentiment analysis request message. message AnalyzeSentimentRequest { - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate sentence offsets for the // sentence sentiment. @@ -894,8 +986,7 @@ message AnalyzeSentimentResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. + // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. string language = 2; // The sentiment for all the sentences in the document. @@ -904,8 +995,8 @@ message AnalyzeSentimentResponse { // The entity-level sentiment analysis request message. message AnalyzeEntitySentimentRequest { - // Input document. - Document document = 1; + // Required. Input document. 
+ Document document = 1 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate offsets. EncodingType encoding_type = 2; @@ -918,15 +1009,14 @@ message AnalyzeEntitySentimentResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. + // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. string language = 2; } // The entity analysis request message. message AnalyzeEntitiesRequest { - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate offsets. EncodingType encoding_type = 2; @@ -939,15 +1029,14 @@ message AnalyzeEntitiesResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. + // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. string language = 2; } // The syntax analysis request message. message AnalyzeSyntaxRequest { - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate offsets. EncodingType encoding_type = 2; @@ -963,15 +1052,14 @@ message AnalyzeSyntaxResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. 
+ // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. string language = 3; } // The document classification request message. message ClassifyTextRequest { - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; } // The document classification response message. @@ -985,6 +1073,7 @@ message ClassifyTextResponse { message AnnotateTextRequest { // All available features for sentiment, syntax, and semantic analysis. // Setting each one to true will enable that specific analysis for the input. + // Next ID: 10 message Features { // Extract syntax information. bool extract_syntax = 1; @@ -998,15 +1087,17 @@ message AnnotateTextRequest { // Extract entities and their associated sentiment. bool extract_entity_sentiment = 4; - // Classify the full document into categories. + // Classify the full document into categories. If this is true, + // the API will use the default model which classifies into a + // [predefined taxonomy](/natural-language/docs/categories). bool classify_text = 6; } - // Input document. - Document document = 1; + // Required. Input document. + Document document = 1 [(google.api.field_behavior) = REQUIRED]; - // The enabled features. - Features features = 2; + // Required. The enabled features. + Features features = 2 [(google.api.field_behavior) = REQUIRED]; // The encoding type used by the API to calculate offsets. EncodingType encoding_type = 3; @@ -1034,36 +1125,9 @@ message AnnotateTextResponse { // The language of the text, which will be the same as the language specified // in the request or, if not specified, the automatically-detected language. - // See [Document.language][google.cloud.language.v1beta2.Document.language] - // field for more details. + // See [Document.language][google.cloud.language.v1beta2.Document.language] field for more details. 
string language = 5; // Categories identified in the input document. repeated ClassificationCategory categories = 6; } - -// Represents the text encoding that the caller uses to process the output. -// Providing an `EncodingType` is recommended because the API provides the -// beginning offsets for various outputs, such as tokens and mentions, and -// languages that natively use different text encodings may access offsets -// differently. -enum EncodingType { - // If `EncodingType` is not specified, encoding-dependent information (such as - // `begin_offset`) will be set at `-1`. - NONE = 0; - - // Encoding-dependent information (such as `begin_offset`) is calculated based - // on the UTF-8 encoding of the input. C++ and Go are examples of languages - // that use this encoding natively. - UTF8 = 1; - - // Encoding-dependent information (such as `begin_offset`) is calculated based - // on the UTF-16 encoding of the input. Java and Javascript are examples of - // languages that use this encoding natively. - UTF16 = 2; - - // Encoding-dependent information (such as `begin_offset`) is calculated based - // on the UTF-32 encoding of the input. Python is an example of a language - // that uses this encoding natively. 
- UTF32 = 3; -} diff --git a/language/google/cloud/language_v1beta2/proto/language_service_pb2.py b/language/google/cloud/language_v1beta2/proto/language_service_pb2.py index 6e2ce20c6b5e..8c9068df2910 100644 --- a/language/google/cloud/language_v1beta2/proto/language_service_pb2.py +++ b/language/google/cloud/language_v1beta2/proto/language_service_pb2.py @@ -17,11 +17,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,13 +30,13 @@ "\n!com.google.cloud.language.v1beta2B\024LanguageServiceProtoP\001ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;language" ), serialized_pb=_b( - '\n:google/cloud/language_v1beta2/proto/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"\xd2\x03\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 
\x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"y\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t 
\x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_M
OOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06"@\n\x06Number\x12\x12\n\x0eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03"\x9a\x08\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12\x42\n\x05label\x18\x02 
\x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label"\xa9\x07\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 
\x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\x12\x07\n\x03\x41SP\x10M\x12\x08\n\x04GMOD\x10N\x12\x08\n\x04GOBJ\x10O\x12\n\n\x06INFMOD\x10P\x12\x07\n\x03MES\x10Q\x12\t\n\x05NCOMP\x10R"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05":\n\x16\x43lassificationCategory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"\x98\x01\n\x17\x41nalyzeSentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 
\x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence"\x9e\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x97\x01\n\x16\x41nalyzeEntitiesRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x95\x01\n\x14\x41nalyzeSyntaxRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t"P\n\x13\x43lassifyTextRequest\x12\x39\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.Document"a\n\x14\x43lassifyTextResponse\x12I\n\ncategories\x18\x01 \x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory"\xff\x02\n\x13\x41nnotateTextRequest\x12\x39\n\x08\x64ocument\x18\x01 
\x01(\x0b\x32\'.google.cloud.language.v1beta2.Document\x12M\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.Features\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x99\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\x12\x15\n\rclassify_text\x18\x06 \x01(\x08"\xe4\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t\x12I\n\ncategories\x18\x06 \x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\xbd\x08\n\x0fLanguageService\x12\xb3\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse".\x82\xd3\xe4\x93\x02("#/v1beta2/documents:analyzeSentiment:\x01*\x12\xaf\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse"-\x82\xd3\xe4\x93\x02\'""/v1beta2/documents:analyzeEntities:\x01*\x12\xcb\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse"4\x82\xd3\xe4\x93\x02.")/v1beta2/documents:analyzeEntitySentiment:\x01*\x12\xa7\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSynt
axRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse"+\x82\xd3\xe4\x93\x02%" /v1beta2/documents:analyzeSyntax:\x01*\x12\xa3\x01\n\x0c\x43lassifyText\x12\x32.google.cloud.language.v1beta2.ClassifyTextRequest\x1a\x33.google.cloud.language.v1beta2.ClassifyTextResponse"*\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:classifyText:\x01*\x12\xa3\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse"*\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:annotateText:\x01*B\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3' + '\n:google/cloud/language_v1beta2/proto/language_service.proto\x12\x1dgoogle.cloud.language.v1beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xc8\x01\n\x08\x44ocument\x12:\n\x04type\x18\x01 \x01(\x0e\x32,.google.cloud.language.v1beta2.Document.Type\x12\x11\n\x07\x63ontent\x18\x02 \x01(\tH\x00\x12\x19\n\x0fgcs_content_uri\x18\x03 \x01(\tH\x00\x12\x10\n\x08language\x18\x04 \x01(\t"6\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nPLAIN_TEXT\x10\x01\x12\x08\n\x04HTML\x10\x02\x42\x08\n\x06source"~\n\x08Sentence\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12;\n\tsentiment\x18\x02 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"\x93\x04\n\x06\x45ntity\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x38\n\x04type\x18\x02 \x01(\x0e\x32*.google.cloud.language.v1beta2.Entity.Type\x12\x45\n\x08metadata\x18\x03 \x03(\x0b\x32\x33.google.cloud.language.v1beta2.Entity.MetadataEntry\x12\x10\n\x08salience\x18\x04 \x01(\x02\x12>\n\x08mentions\x18\x05 \x03(\x0b\x32,.google.cloud.language.v1beta2.EntityMention\x12;\n\tsentiment\x18\x06 
\x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb9\x01\n\x04Type\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06PERSON\x10\x01\x12\x0c\n\x08LOCATION\x10\x02\x12\x10\n\x0cORGANIZATION\x10\x03\x12\t\n\x05\x45VENT\x10\x04\x12\x0f\n\x0bWORK_OF_ART\x10\x05\x12\x11\n\rCONSUMER_GOOD\x10\x06\x12\t\n\x05OTHER\x10\x07\x12\x10\n\x0cPHONE_NUMBER\x10\t\x12\x0b\n\x07\x41\x44\x44RESS\x10\n\x12\x08\n\x04\x44\x41TE\x10\x0b\x12\n\n\x06NUMBER\x10\x0c\x12\t\n\x05PRICE\x10\r"\xda\x01\n\x05Token\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12\x43\n\x0epart_of_speech\x18\x02 \x01(\x0b\x32+.google.cloud.language.v1beta2.PartOfSpeech\x12\x46\n\x0f\x64\x65pendency_edge\x18\x03 \x01(\x0b\x32-.google.cloud.language.v1beta2.DependencyEdge\x12\r\n\x05lemma\x18\x04 \x01(\t"-\n\tSentiment\x12\x11\n\tmagnitude\x18\x02 \x01(\x02\x12\r\n\x05score\x18\x03 \x01(\x02"\xdf\x10\n\x0cPartOfSpeech\x12<\n\x03tag\x18\x01 \x01(\x0e\x32/.google.cloud.language.v1beta2.PartOfSpeech.Tag\x12\x42\n\x06\x61spect\x18\x02 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Aspect\x12>\n\x04\x63\x61se\x18\x03 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Case\x12>\n\x04\x66orm\x18\x04 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Form\x12\x42\n\x06gender\x18\x05 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Gender\x12>\n\x04mood\x18\x06 \x01(\x0e\x32\x30.google.cloud.language.v1beta2.PartOfSpeech.Mood\x12\x42\n\x06number\x18\x07 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Number\x12\x42\n\x06person\x18\x08 \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Person\x12\x42\n\x06proper\x18\t \x01(\x0e\x32\x32.google.cloud.language.v1beta2.PartOfSpeech.Proper\x12L\n\x0breciprocity\x18\n \x01(\x0e\x32\x37.google.cloud.language.v1beta2.PartOfSpeech.Reciprocity\x12@\n\x05tense\x18\x0b 
\x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Tense\x12@\n\x05voice\x18\x0c \x01(\x0e\x32\x31.google.cloud.language.v1beta2.PartOfSpeech.Voice"\x8d\x01\n\x03Tag\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x07\n\x03\x41\x44J\x10\x01\x12\x07\n\x03\x41\x44P\x10\x02\x12\x07\n\x03\x41\x44V\x10\x03\x12\x08\n\x04\x43ONJ\x10\x04\x12\x07\n\x03\x44\x45T\x10\x05\x12\x08\n\x04NOUN\x10\x06\x12\x07\n\x03NUM\x10\x07\x12\x08\n\x04PRON\x10\x08\x12\x07\n\x03PRT\x10\t\x12\t\n\x05PUNCT\x10\n\x12\x08\n\x04VERB\x10\x0b\x12\x05\n\x01X\x10\x0c\x12\t\n\x05\x41\x46\x46IX\x10\r"O\n\x06\x41spect\x12\x12\n\x0e\x41SPECT_UNKNOWN\x10\x00\x12\x0e\n\nPERFECTIVE\x10\x01\x12\x10\n\x0cIMPERFECTIVE\x10\x02\x12\x0f\n\x0bPROGRESSIVE\x10\x03"\xf8\x01\n\x04\x43\x61se\x12\x10\n\x0c\x43\x41SE_UNKNOWN\x10\x00\x12\x0e\n\nACCUSATIVE\x10\x01\x12\r\n\tADVERBIAL\x10\x02\x12\x11\n\rCOMPLEMENTIVE\x10\x03\x12\n\n\x06\x44\x41TIVE\x10\x04\x12\x0c\n\x08GENITIVE\x10\x05\x12\x10\n\x0cINSTRUMENTAL\x10\x06\x12\x0c\n\x08LOCATIVE\x10\x07\x12\x0e\n\nNOMINATIVE\x10\x08\x12\x0b\n\x07OBLIQUE\x10\t\x12\r\n\tPARTITIVE\x10\n\x12\x11\n\rPREPOSITIONAL\x10\x0b\x12\x12\n\x0eREFLEXIVE_CASE\x10\x0c\x12\x11\n\rRELATIVE_CASE\x10\r\x12\x0c\n\x08VOCATIVE\x10\x0e"\xaf\x01\n\x04\x46orm\x12\x10\n\x0c\x46ORM_UNKNOWN\x10\x00\x12\x0c\n\x08\x41\x44NOMIAL\x10\x01\x12\r\n\tAUXILIARY\x10\x02\x12\x12\n\x0e\x43OMPLEMENTIZER\x10\x03\x12\x10\n\x0c\x46INAL_ENDING\x10\x04\x12\n\n\x06GERUND\x10\x05\x12\n\n\x06REALIS\x10\x06\x12\x0c\n\x08IRREALIS\x10\x07\x12\t\n\x05SHORT\x10\x08\x12\x08\n\x04LONG\x10\t\x12\t\n\x05ORDER\x10\n\x12\x0c\n\x08SPECIFIC\x10\x0b"E\n\x06Gender\x12\x12\n\x0eGENDER_UNKNOWN\x10\x00\x12\x0c\n\x08\x46\x45MININE\x10\x01\x12\r\n\tMASCULINE\x10\x02\x12\n\n\x06NEUTER\x10\x03"\x7f\n\x04Mood\x12\x10\n\x0cMOOD_UNKNOWN\x10\x00\x12\x14\n\x10\x43ONDITIONAL_MOOD\x10\x01\x12\x0e\n\nIMPERATIVE\x10\x02\x12\x0e\n\nINDICATIVE\x10\x03\x12\x11\n\rINTERROGATIVE\x10\x04\x12\x0b\n\x07JUSSIVE\x10\x05\x12\x0f\n\x0bSUBJUNCTIVE\x10\x06"@\n\x06Number\x12\x12\n\x0
eNUMBER_UNKNOWN\x10\x00\x12\x0c\n\x08SINGULAR\x10\x01\x12\n\n\x06PLURAL\x10\x02\x12\x08\n\x04\x44UAL\x10\x03"T\n\x06Person\x12\x12\n\x0ePERSON_UNKNOWN\x10\x00\x12\t\n\x05\x46IRST\x10\x01\x12\n\n\x06SECOND\x10\x02\x12\t\n\x05THIRD\x10\x03\x12\x14\n\x10REFLEXIVE_PERSON\x10\x04"8\n\x06Proper\x12\x12\n\x0ePROPER_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\x0e\n\nNOT_PROPER\x10\x02"J\n\x0bReciprocity\x12\x17\n\x13RECIPROCITY_UNKNOWN\x10\x00\x12\x0e\n\nRECIPROCAL\x10\x01\x12\x12\n\x0eNON_RECIPROCAL\x10\x02"s\n\x05Tense\x12\x11\n\rTENSE_UNKNOWN\x10\x00\x12\x15\n\x11\x43ONDITIONAL_TENSE\x10\x01\x12\n\n\x06\x46UTURE\x10\x02\x12\x08\n\x04PAST\x10\x03\x12\x0b\n\x07PRESENT\x10\x04\x12\r\n\tIMPERFECT\x10\x05\x12\x0e\n\nPLUPERFECT\x10\x06"B\n\x05Voice\x12\x11\n\rVOICE_UNKNOWN\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\r\n\tCAUSATIVE\x10\x02\x12\x0b\n\x07PASSIVE\x10\x03"\x9a\x08\n\x0e\x44\x65pendencyEdge\x12\x18\n\x10head_token_index\x18\x01 \x01(\x05\x12\x42\n\x05label\x18\x02 \x01(\x0e\x32\x33.google.cloud.language.v1beta2.DependencyEdge.Label"\xa9\x07\n\x05Label\x12\x0b\n\x07UNKNOWN\x10\x00\x12\n\n\x06\x41\x42\x42REV\x10\x01\x12\t\n\x05\x41\x43OMP\x10\x02\x12\t\n\x05\x41\x44VCL\x10\x03\x12\n\n\x06\x41\x44VMOD\x10\x04\x12\x08\n\x04\x41MOD\x10\x05\x12\t\n\x05\x41PPOS\x10\x06\x12\x08\n\x04\x41TTR\x10\x07\x12\x07\n\x03\x41UX\x10\x08\x12\x0b\n\x07\x41UXPASS\x10\t\x12\x06\n\x02\x43\x43\x10\n\x12\t\n\x05\x43\x43OMP\x10\x0b\x12\x08\n\x04\x43ONJ\x10\x0c\x12\t\n\x05\x43SUBJ\x10\r\x12\r\n\tCSUBJPASS\x10\x0e\x12\x07\n\x03\x44\x45P\x10\x0f\x12\x07\n\x03\x44\x45T\x10\x10\x12\r\n\tDISCOURSE\x10\x11\x12\x08\n\x04\x44OBJ\x10\x12\x12\x08\n\x04\x45XPL\x10\x13\x12\x0c\n\x08GOESWITH\x10\x14\x12\x08\n\x04IOBJ\x10\x15\x12\x08\n\x04MARK\x10\x16\x12\x07\n\x03MWE\x10\x17\x12\x07\n\x03MWV\x10\x18\x12\x07\n\x03NEG\x10\x19\x12\x06\n\x02NN\x10\x1a\x12\x0c\n\x08NPADVMOD\x10\x1b\x12\t\n\x05NSUBJ\x10\x1c\x12\r\n\tNSUBJPASS\x10\x1d\x12\x07\n\x03NUM\x10\x1e\x12\n\n\x06NUMBER\x10\x1f\x12\x05\n\x01P\x10 
\x12\r\n\tPARATAXIS\x10!\x12\x0b\n\x07PARTMOD\x10"\x12\t\n\x05PCOMP\x10#\x12\x08\n\x04POBJ\x10$\x12\x08\n\x04POSS\x10%\x12\x0b\n\x07POSTNEG\x10&\x12\x0b\n\x07PRECOMP\x10\'\x12\x0b\n\x07PRECONJ\x10(\x12\n\n\x06PREDET\x10)\x12\x08\n\x04PREF\x10*\x12\x08\n\x04PREP\x10+\x12\t\n\x05PRONL\x10,\x12\x07\n\x03PRT\x10-\x12\x06\n\x02PS\x10.\x12\x0c\n\x08QUANTMOD\x10/\x12\t\n\x05RCMOD\x10\x30\x12\x0c\n\x08RCMODREL\x10\x31\x12\t\n\x05RDROP\x10\x32\x12\x07\n\x03REF\x10\x33\x12\x0b\n\x07REMNANT\x10\x34\x12\x0e\n\nREPARANDUM\x10\x35\x12\x08\n\x04ROOT\x10\x36\x12\x08\n\x04SNUM\x10\x37\x12\x08\n\x04SUFF\x10\x38\x12\x08\n\x04TMOD\x10\x39\x12\t\n\x05TOPIC\x10:\x12\x08\n\x04VMOD\x10;\x12\x0c\n\x08VOCATIVE\x10<\x12\t\n\x05XCOMP\x10=\x12\n\n\x06SUFFIX\x10>\x12\t\n\x05TITLE\x10?\x12\x0c\n\x08\x41\x44VPHMOD\x10@\x12\x0b\n\x07\x41UXCAUS\x10\x41\x12\t\n\x05\x41UXVV\x10\x42\x12\t\n\x05\x44TMOD\x10\x43\x12\x0b\n\x07\x46OREIGN\x10\x44\x12\x06\n\x02KW\x10\x45\x12\x08\n\x04LIST\x10\x46\x12\x08\n\x04NOMC\x10G\x12\x0c\n\x08NOMCSUBJ\x10H\x12\x10\n\x0cNOMCSUBJPASS\x10I\x12\x08\n\x04NUMC\x10J\x12\x07\n\x03\x43OP\x10K\x12\x0e\n\nDISLOCATED\x10L\x12\x07\n\x03\x41SP\x10M\x12\x08\n\x04GMOD\x10N\x12\x08\n\x04GOBJ\x10O\x12\n\n\x06INFMOD\x10P\x12\x07\n\x03MES\x10Q\x12\t\n\x05NCOMP\x10R"\xf6\x01\n\rEntityMention\x12\x35\n\x04text\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.TextSpan\x12?\n\x04type\x18\x02 \x01(\x0e\x32\x31.google.cloud.language.v1beta2.EntityMention.Type\x12;\n\tsentiment\x18\x03 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment"0\n\x04Type\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\n\n\x06PROPER\x10\x01\x12\n\n\x06\x43OMMON\x10\x02"1\n\x08TextSpan\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12\x14\n\x0c\x62\x65gin_offset\x18\x02 \x01(\x05":\n\x16\x43lassificationCategory\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"\x9d\x01\n\x17\x41nalyzeSentimentRequest\x12>\n\x08\x64ocument\x18\x01 
\x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\xae\x01\n\x18\x41nalyzeSentimentResponse\x12\x44\n\x12\x64ocument_sentiment\x18\x01 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x02 \x01(\t\x12:\n\tsentences\x18\x03 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence"\xa3\x01\n\x1d\x41nalyzeEntitySentimentRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"k\n\x1e\x41nalyzeEntitySentimentResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x9c\x01\n\x16\x41nalyzeEntitiesRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"d\n\x17\x41nalyzeEntitiesResponse\x12\x37\n\x08\x65ntities\x18\x01 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x10\n\x08language\x18\x02 \x01(\t"\x9a\x01\n\x14\x41nalyzeSyntaxRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x02 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType"\x9b\x01\n\x15\x41nalyzeSyntaxResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x10\n\x08language\x18\x03 \x01(\t"U\n\x13\x43lassifyTextRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02"a\n\x14\x43lassifyTextResponse\x12I\n\ncategories\x18\x01 
\x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory"\x89\x03\n\x13\x41nnotateTextRequest\x12>\n\x08\x64ocument\x18\x01 \x01(\x0b\x32\'.google.cloud.language.v1beta2.DocumentB\x03\xe0\x41\x02\x12R\n\x08\x66\x65\x61tures\x18\x02 \x01(\x0b\x32;.google.cloud.language.v1beta2.AnnotateTextRequest.FeaturesB\x03\xe0\x41\x02\x12\x42\n\rencoding_type\x18\x03 \x01(\x0e\x32+.google.cloud.language.v1beta2.EncodingType\x1a\x99\x01\n\x08\x46\x65\x61tures\x12\x16\n\x0e\x65xtract_syntax\x18\x01 \x01(\x08\x12\x18\n\x10\x65xtract_entities\x18\x02 \x01(\x08\x12"\n\x1a\x65xtract_document_sentiment\x18\x03 \x01(\x08\x12 \n\x18\x65xtract_entity_sentiment\x18\x04 \x01(\x08\x12\x15\n\rclassify_text\x18\x06 \x01(\x08"\xe4\x02\n\x14\x41nnotateTextResponse\x12:\n\tsentences\x18\x01 \x03(\x0b\x32\'.google.cloud.language.v1beta2.Sentence\x12\x34\n\x06tokens\x18\x02 \x03(\x0b\x32$.google.cloud.language.v1beta2.Token\x12\x37\n\x08\x65ntities\x18\x03 \x03(\x0b\x32%.google.cloud.language.v1beta2.Entity\x12\x44\n\x12\x64ocument_sentiment\x18\x04 \x01(\x0b\x32(.google.cloud.language.v1beta2.Sentiment\x12\x10\n\x08language\x18\x05 \x01(\t\x12I\n\ncategories\x18\x06 
\x03(\x0b\x32\x35.google.cloud.language.v1beta2.ClassificationCategory*8\n\x0c\x45ncodingType\x12\x08\n\x04NONE\x10\x00\x12\x08\n\x04UTF8\x10\x01\x12\t\n\x05UTF16\x10\x02\x12\t\n\x05UTF32\x10\x03\x32\x8a\x0b\n\x0fLanguageService\x12\xd7\x01\n\x10\x41nalyzeSentiment\x12\x36.google.cloud.language.v1beta2.AnalyzeSentimentRequest\x1a\x37.google.cloud.language.v1beta2.AnalyzeSentimentResponse"R\x82\xd3\xe4\x93\x02("#/v1beta2/documents:analyzeSentiment:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xd3\x01\n\x0f\x41nalyzeEntities\x12\x35.google.cloud.language.v1beta2.AnalyzeEntitiesRequest\x1a\x36.google.cloud.language.v1beta2.AnalyzeEntitiesResponse"Q\x82\xd3\xe4\x93\x02\'""/v1beta2/documents:analyzeEntities:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xef\x01\n\x16\x41nalyzeEntitySentiment\x12<.google.cloud.language.v1beta2.AnalyzeEntitySentimentRequest\x1a=.google.cloud.language.v1beta2.AnalyzeEntitySentimentResponse"X\x82\xd3\xe4\x93\x02.")/v1beta2/documents:analyzeEntitySentiment:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xcb\x01\n\rAnalyzeSyntax\x12\x33.google.cloud.language.v1beta2.AnalyzeSyntaxRequest\x1a\x34.google.cloud.language.v1beta2.AnalyzeSyntaxResponse"O\x82\xd3\xe4\x93\x02%" 
/v1beta2/documents:analyzeSyntax:\x01*\xda\x41\x16\x64ocument,encoding_type\xda\x41\x08\x64ocument\x12\xae\x01\n\x0c\x43lassifyText\x12\x32.google.cloud.language.v1beta2.ClassifyTextRequest\x1a\x33.google.cloud.language.v1beta2.ClassifyTextResponse"5\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:classifyText:\x01*\xda\x41\x08\x64ocument\x12\xd9\x01\n\x0c\x41nnotateText\x12\x32.google.cloud.language.v1beta2.AnnotateTextRequest\x1a\x33.google.cloud.language.v1beta2.AnnotateTextResponse"`\x82\xd3\xe4\x93\x02$"\x1f/v1beta2/documents:annotateText:\x01*\xda\x41\x1f\x64ocument,features,encoding_type\xda\x41\x11\x64ocument,features\x1az\xca\x41\x17language.googleapis.com\xd2\x41]https://www.googleapis.com/auth/cloud-language,https://www.googleapis.com/auth/cloud-platformB\x82\x01\n!com.google.cloud.language.v1beta2B\x14LanguageServiceProtoP\x01ZEgoogle.golang.org/genproto/googleapis/cloud/language/v1beta2;languageb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, ], ) @@ -63,8 +61,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6939, - serialized_end=6995, + serialized_start=7035, + serialized_end=7091, ) _sym_db.RegisterEnumDescriptor(_ENCODINGTYPE) @@ -97,8 +95,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=355, - serialized_end=409, + serialized_start=351, + serialized_end=405, ) _sym_db.RegisterEnumDescriptor(_DOCUMENT_TYPE) @@ -132,11 +130,26 @@ _descriptor.EnumValueDescriptor( name="OTHER", index=7, number=7, serialized_options=None, type=None ), + _descriptor.EnumValueDescriptor( + name="PHONE_NUMBER", index=8, number=9, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="ADDRESS", index=9, number=10, 
serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DATE", index=10, number=11, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="NUMBER", index=11, number=12, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="PRICE", index=12, number=13, serialized_options=None, type=None + ), ], containing_type=None, serialized_options=None, - serialized_start=895, - serialized_end=1016, + serialized_start=892, + serialized_end=1077, ) _sym_db.RegisterEnumDescriptor(_ENTITY_TYPE) @@ -191,8 +204,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2108, - serialized_end=2249, + serialized_start=2169, + serialized_end=2310, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TAG) @@ -217,8 +230,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2251, - serialized_end=2330, + serialized_start=2312, + serialized_end=2391, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_ASPECT) @@ -288,8 +301,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2333, - serialized_end=2581, + serialized_start=2394, + serialized_end=2642, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_CASE) @@ -338,8 +351,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2584, - serialized_end=2759, + serialized_start=2645, + serialized_end=2820, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_FORM) @@ -364,8 +377,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2761, - serialized_end=2830, + serialized_start=2822, + serialized_end=2891, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_GENDER) @@ -403,8 +416,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2832, - serialized_end=2959, + serialized_start=2893, + serialized_end=3020, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_MOOD) @@ -429,8 +442,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2961, - 
serialized_end=3025, + serialized_start=3022, + serialized_end=3086, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_NUMBER) @@ -462,8 +475,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3027, - serialized_end=3111, + serialized_start=3088, + serialized_end=3172, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PERSON) @@ -485,8 +498,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3113, - serialized_end=3169, + serialized_start=3174, + serialized_end=3230, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_PROPER) @@ -512,8 +525,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3171, - serialized_end=3245, + serialized_start=3232, + serialized_end=3306, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_RECIPROCITY) @@ -551,8 +564,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3247, - serialized_end=3362, + serialized_start=3308, + serialized_end=3423, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_TENSE) @@ -577,8 +590,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3364, - serialized_end=3430, + serialized_start=3425, + serialized_end=3491, ) _sym_db.RegisterEnumDescriptor(_PARTOFSPEECH_VOICE) @@ -840,8 +853,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3546, - serialized_end=4483, + serialized_start=3607, + serialized_end=4544, ) _sym_db.RegisterEnumDescriptor(_DEPENDENCYEDGE_LABEL) @@ -863,8 +876,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4684, - serialized_end=4732, + serialized_start=4745, + serialized_end=4793, ) _sym_db.RegisterEnumDescriptor(_ENTITYMENTION_TYPE) @@ -965,8 +978,8 @@ fields=[], ) ], - serialized_start=219, - serialized_end=419, + serialized_start=215, + serialized_end=415, ) @@ -1022,8 +1035,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=421, - serialized_end=547, + serialized_start=417, + serialized_end=543, ) @@ -1079,8 +1092,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=846, - serialized_end=893, + serialized_start=842, + serialized_end=889, ) _ENTITY = _descriptor.Descriptor( @@ -1207,8 +1220,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=550, - serialized_end=1016, + serialized_start=546, + serialized_end=1077, ) @@ -1300,8 +1313,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1019, - serialized_end=1237, + serialized_start=1080, + serialized_end=1298, ) @@ -1357,8 +1370,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1239, - serialized_end=1284, + serialized_start=1300, + serialized_end=1345, ) @@ -1607,8 +1620,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1287, - serialized_end=3430, + serialized_start=1348, + serialized_end=3491, ) @@ -1664,8 +1677,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3433, - serialized_end=4483, + serialized_start=3494, + serialized_end=4544, ) @@ -1739,8 +1752,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4486, - serialized_end=4732, + serialized_start=4547, + serialized_end=4793, ) @@ -1796,8 +1809,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4734, - serialized_end=4783, + serialized_start=4795, + serialized_end=4844, ) @@ -1853,8 +1866,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4785, - serialized_end=4843, + serialized_start=4846, + serialized_end=4904, ) @@ -1880,7 +1893,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1910,8 +1923,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4846, - serialized_end=4998, + serialized_start=4907, + serialized_end=5064, ) @@ -1985,8 +1998,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5001, - 
serialized_end=5175, + serialized_start=5067, + serialized_end=5241, ) @@ -2012,7 +2025,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2042,8 +2055,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5178, - serialized_end=5336, + serialized_start=5244, + serialized_end=5407, ) @@ -2099,8 +2112,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5338, - serialized_end=5445, + serialized_start=5409, + serialized_end=5516, ) @@ -2126,7 +2139,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2156,8 +2169,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5448, - serialized_end=5599, + serialized_start=5519, + serialized_end=5675, ) @@ -2213,8 +2226,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5601, - serialized_end=5701, + serialized_start=5677, + serialized_end=5777, ) @@ -2240,7 +2253,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2270,8 +2283,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5704, - serialized_end=5853, + serialized_start=5780, + serialized_end=5934, ) @@ -2345,8 +2358,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5856, - serialized_end=6011, + serialized_start=5937, + serialized_end=6092, ) @@ -2372,7 +2385,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -2384,8 +2397,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6013, - serialized_end=6093, + 
serialized_start=6094, + serialized_end=6179, ) @@ -2423,8 +2436,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6095, - serialized_end=6192, + serialized_start=6181, + serialized_end=6278, ) @@ -2534,8 +2547,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6425, - serialized_end=6578, + serialized_start=6521, + serialized_end=6674, ) _ANNOTATETEXTREQUEST = _descriptor.Descriptor( @@ -2560,7 +2573,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2578,7 +2591,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2608,8 +2621,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6195, - serialized_end=6578, + serialized_start=6281, + serialized_end=6674, ) @@ -2737,8 +2750,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6581, - serialized_end=6937, + serialized_start=6677, + serialized_end=7033, ) _DOCUMENT.fields_by_name["type"].enum_type = _DOCUMENT_TYPE @@ -2871,7 +2884,8 @@ The source of the document: a string containing the content or a Google Cloud Storage URI. content: - The content of the input in string format. + The content of the input in string format. Cloud audit logging + exempt since it is based on user data. gcs_content_uri: The Google Cloud Storage URI where the file content is located. This URI must be of the form: @@ -2943,9 +2957,11 @@ type: The entity type. metadata: - Metadata associated with the entity. Currently, Wikipedia - URLs and Knowledge Graph MIDs are provided, if available. The - associated keys are "wikipedia\_url" and "mid", respectively. + Metadata associated with the entity. 
For most entity types, + the metadata is a Wikipedia URL (``wikipedia_url``) and + Knowledge Graph MID (``mid``), if they are available. For the + metadata associated with other entity types, see the Type + table below. salience: The salience score associated with the entity in the [0, 1.0] range. The salience score for an entity provides information @@ -3001,7 +3017,7 @@ DESCRIPTOR=_SENTIMENT, __module__="google.cloud.language_v1beta2.proto.language_service_pb2", __doc__="""Represents the feeling associated with the entire text or entities in - the text. + the text. Next ID: 6 Attributes: @@ -3144,7 +3160,8 @@ Attributes: name: - The name of the category representing the document. + The name of the category representing the document, from the + `predefined taxonomy `__. confidence: The classifier's confidence of the category. Number represents how certain the classifier is that this category represents @@ -3166,7 +3183,7 @@ Attributes: document: - Input document. + Required. Input document. encoding_type: The encoding type used by the API to calculate sentence offsets for the sentence sentiment. @@ -3213,7 +3230,7 @@ Attributes: document: - Input document. + Required. Input document. encoding_type: The encoding type used by the API to calculate offsets. """, @@ -3258,7 +3275,7 @@ Attributes: document: - Input document. + Required. Input document. encoding_type: The encoding type used by the API to calculate offsets. """, @@ -3302,7 +3319,7 @@ Attributes: document: - Input document. + Required. Input document. encoding_type: The encoding type used by the API to calculate offsets. """, @@ -3349,7 +3366,7 @@ Attributes: document: - Input document. + Required. Input document. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.ClassifyTextRequest) ), @@ -3386,7 +3403,7 @@ __module__="google.cloud.language_v1beta2.proto.language_service_pb2", __doc__="""All available features for sentiment, syntax, and semantic analysis. 
Setting each one to true will enable that specific analysis for the - input. + input. Next ID: 10 Attributes: @@ -3399,7 +3416,9 @@ extract_entity_sentiment: Extract entities and their associated sentiment. classify_text: - Classify the full document into categories. + Classify the full document into categories. If this is true, + the API will use the default model which classifies into a + `predefined taxonomy `__. """, # @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.AnnotateTextRequest.Features) ), @@ -3412,9 +3431,9 @@ Attributes: document: - Input document. + Required. Input document. features: - The enabled features. + Required. The enabled features. encoding_type: The encoding type used by the API to calculate offsets. """, @@ -3470,15 +3489,24 @@ DESCRIPTOR._options = None _ENTITY_METADATAENTRY._options = None +_ANALYZESENTIMENTREQUEST.fields_by_name["document"]._options = None +_ANALYZEENTITYSENTIMENTREQUEST.fields_by_name["document"]._options = None +_ANALYZEENTITIESREQUEST.fields_by_name["document"]._options = None +_ANALYZESYNTAXREQUEST.fields_by_name["document"]._options = None +_CLASSIFYTEXTREQUEST.fields_by_name["document"]._options = None +_ANNOTATETEXTREQUEST.fields_by_name["document"]._options = None +_ANNOTATETEXTREQUEST.fields_by_name["features"]._options = None _LANGUAGESERVICE = _descriptor.ServiceDescriptor( name="LanguageService", full_name="google.cloud.language.v1beta2.LanguageService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=6998, - serialized_end=8083, + serialized_options=_b( + "\312A\027language.googleapis.com\322A]https://www.googleapis.com/auth/cloud-language,https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=7094, + serialized_end=8512, methods=[ _descriptor.MethodDescriptor( name="AnalyzeSentiment", @@ -3488,7 +3516,7 @@ input_type=_ANALYZESENTIMENTREQUEST, output_type=_ANALYZESENTIMENTRESPONSE, serialized_options=_b( - 
'\202\323\344\223\002("#/v1beta2/documents:analyzeSentiment:\001*' + '\202\323\344\223\002("#/v1beta2/documents:analyzeSentiment:\001*\332A\026document,encoding_type\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3499,7 +3527,7 @@ input_type=_ANALYZEENTITIESREQUEST, output_type=_ANALYZEENTITIESRESPONSE, serialized_options=_b( - '\202\323\344\223\002\'""/v1beta2/documents:analyzeEntities:\001*' + '\202\323\344\223\002\'""/v1beta2/documents:analyzeEntities:\001*\332A\026document,encoding_type\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3510,7 +3538,7 @@ input_type=_ANALYZEENTITYSENTIMENTREQUEST, output_type=_ANALYZEENTITYSENTIMENTRESPONSE, serialized_options=_b( - '\202\323\344\223\002.")/v1beta2/documents:analyzeEntitySentiment:\001*' + '\202\323\344\223\002.")/v1beta2/documents:analyzeEntitySentiment:\001*\332A\026document,encoding_type\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3521,7 +3549,7 @@ input_type=_ANALYZESYNTAXREQUEST, output_type=_ANALYZESYNTAXRESPONSE, serialized_options=_b( - '\202\323\344\223\002%" /v1beta2/documents:analyzeSyntax:\001*' + '\202\323\344\223\002%" /v1beta2/documents:analyzeSyntax:\001*\332A\026document,encoding_type\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3532,7 +3560,7 @@ input_type=_CLASSIFYTEXTREQUEST, output_type=_CLASSIFYTEXTRESPONSE, serialized_options=_b( - '\202\323\344\223\002$"\037/v1beta2/documents:classifyText:\001*' + '\202\323\344\223\002$"\037/v1beta2/documents:classifyText:\001*\332A\010document' ), ), _descriptor.MethodDescriptor( @@ -3543,7 +3571,7 @@ input_type=_ANNOTATETEXTREQUEST, output_type=_ANNOTATETEXTRESPONSE, serialized_options=_b( - '\202\323\344\223\002$"\037/v1beta2/documents:annotateText:\001*' + '\202\323\344\223\002$"\037/v1beta2/documents:annotateText:\001*\332A\037document,features,encoding_type\332A\021document,features' ), ), ], diff --git a/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py 
b/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py index da4223708164..e0e1e4124606 100644 --- a/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py +++ b/language/google/cloud/language_v1beta2/proto/language_service_pb2_grpc.py @@ -71,10 +71,8 @@ def AnalyzeEntities(self, request, context): raise NotImplementedError("Method not implemented!") def AnalyzeEntitySentiment(self, request, context): - """Finds entities, similar to - [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] - in the text and analyzes sentiment associated with each entity and its - mentions. + """Finds entities, similar to [AnalyzeEntities][google.cloud.language.v1beta2.LanguageService.AnalyzeEntities] in the text and analyzes + sentiment associated with each entity and its mentions. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/language/samples/v1/test/samples.manifest.yaml b/language/samples/v1/test/samples.manifest.yaml index 28d2760ff3db..aa270425584c 100644 --- a/language/samples/v1/test/samples.manifest.yaml +++ b/language/samples/v1/test/samples.manifest.yaml @@ -6,33 +6,33 @@ base: &common chdir: '{@manifest_dir}/../..' basepath: '.' 
samples: -- <<: *common - path: '{basepath}/v1/language_entity_sentiment_gcs.py' - sample: 'language_entity_sentiment_gcs' - <<: *common path: '{basepath}/v1/language_classify_gcs.py' sample: 'language_classify_gcs' - <<: *common - path: '{basepath}/v1/language_syntax_gcs.py' - sample: 'language_syntax_gcs' + path: '{basepath}/v1/language_classify_text.py' + sample: 'language_classify_text' +- <<: *common + path: '{basepath}/v1/language_entities_gcs.py' + sample: 'language_entities_gcs' - <<: *common path: '{basepath}/v1/language_entities_text.py' sample: 'language_entities_text' - <<: *common - path: '{basepath}/v1/language_classify_text.py' - sample: 'language_classify_text' -- <<: *common - path: '{basepath}/v1/language_syntax_text.py' - sample: 'language_syntax_text' + path: '{basepath}/v1/language_entity_sentiment_gcs.py' + sample: 'language_entity_sentiment_gcs' - <<: *common path: '{basepath}/v1/language_entity_sentiment_text.py' sample: 'language_entity_sentiment_text' -- <<: *common - path: '{basepath}/v1/language_entities_gcs.py' - sample: 'language_entities_gcs' - <<: *common path: '{basepath}/v1/language_sentiment_gcs.py' sample: 'language_sentiment_gcs' - <<: *common path: '{basepath}/v1/language_sentiment_text.py' sample: 'language_sentiment_text' +- <<: *common + path: '{basepath}/v1/language_syntax_gcs.py' + sample: 'language_syntax_gcs' +- <<: *common + path: '{basepath}/v1/language_syntax_text.py' + sample: 'language_syntax_text' diff --git a/language/synth.metadata b/language/synth.metadata index bc5806ebdf94..8e564ada0f86 100644 --- a/language/synth.metadata +++ b/language/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-11T12:28:39.572337Z", + "updateTime": "2019-10-01T12:29:45.277286Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.2", - "dockerImage": "googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" + "version": "0.37.1", + "dockerImage": 
"googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f1c042777e90baae0f8590f7820eed2c6ef758b2", - "internalRef": "268319807" + "sha": "ce3c574d1266026cebea3a893247790bd68191c2", + "internalRef": "272147209" } }, { diff --git a/logging/CHANGELOG.md b/logging/CHANGELOG.md index f7ad1b7451ed..05caf8d580b8 100644 --- a/logging/CHANGELOG.md +++ b/logging/CHANGELOG.md @@ -4,6 +4,35 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## 1.14.0 + +10-15-2019 06:50 PDT + + +### Implementation Changes +- Fix proto copy. ([#9420](https://github.com/googleapis/google-cloud-python/pull/9420)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +## 1.13.0 + +09-23-2019 10:00 PDT + +### Implementation Changes +- Pass 'stream' argument to super in 'ContainerEngineHandler.__init__'. ([#9166](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9166)) + +### New Features +- Add LoggingV2Servicer, LogSinks, logging_metrics, and log_entry. Add LogSeverity and HttpRequest types (via synth). ([#9262](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9262)) +- Add client_options to logging v1 ([#9046](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9046)) + +### Documentation +- Remove compatability badges from READMEs. ([#9035](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Docs: Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/9085)) +- Delete custom synth removing gRPC send/recv msg size limits. 
([#8939](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8939)) + ## 1.12.1 08-01-2019 09:45 PDT diff --git a/logging/docs/gapic/v2/api.rst b/logging/docs/gapic/v2/api.rst new file mode 100644 index 000000000000..2dc6bf6fcc6b --- /dev/null +++ b/logging/docs/gapic/v2/api.rst @@ -0,0 +1,6 @@ +Client for Stackdriver Logging API +================================== + +.. automodule:: google.cloud.logging_v2 + :members: + :inherited-members: \ No newline at end of file diff --git a/logging/docs/gapic/v2/types.rst b/logging/docs/gapic/v2/types.rst new file mode 100644 index 000000000000..5521d4f9bc12 --- /dev/null +++ b/logging/docs/gapic/v2/types.rst @@ -0,0 +1,5 @@ +Types for Stackdriver Logging API Client +======================================== + +.. automodule:: google.cloud.logging_v2.types + :members: \ No newline at end of file diff --git a/logging/docs/index.rst b/logging/docs/index.rst index 67ad362dfc69..f617201a90ab 100644 --- a/logging/docs/index.rst +++ b/logging/docs/index.rst @@ -1,29 +1,12 @@ .. include:: README.rst -Usage Documentation +Documentation ------------------- .. toctree:: - :maxdepth: 2 - - usage - -Api Reference -------------- -.. toctree:: - :maxdepth: 2 + :maxdepth: 3 - client - logger - entries - metric - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base + v1 + v2 Changelog ~~~~~~~~~ diff --git a/logging/docs/usage.rst b/logging/docs/usage.rst index 122a850fecba..f5662bcbaa08 100644 --- a/logging/docs/usage.rst +++ b/logging/docs/usage.rst @@ -1,3 +1,6 @@ +Usage Guide +=========== + Writing log entries ------------------- diff --git a/logging/docs/v1.rst b/logging/docs/v1.rst new file mode 100644 index 000000000000..f4f79d377a65 --- /dev/null +++ b/logging/docs/v1.rst @@ -0,0 +1,18 @@ +v1 +============== +.. 
toctree:: + :maxdepth: 2 + + usage + client + logger + entries + metric + sink + stdlib-usage + handlers + handlers-app-engine + handlers-container-engine + transports-sync + transports-thread + transports-base \ No newline at end of file diff --git a/logging/docs/v2.rst b/logging/docs/v2.rst new file mode 100644 index 000000000000..8dfc18b48171 --- /dev/null +++ b/logging/docs/v2.rst @@ -0,0 +1,7 @@ +v2 +---------------- +.. toctree:: + :maxdepth: 2 + + gapic/v2/api + gapic/v2/types \ No newline at end of file diff --git a/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py index 2942e2207000..18ed3c277435 100644 --- a/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py +++ b/logging/google/cloud/logging_v2/gapic/config_service_v2_client.py @@ -40,7 +40,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class ConfigServiceV2Client(object): @@ -77,7 +77,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod @@ -110,7 +110,7 @@ def exclusion_path(cls, project, exclusion): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_exclusion_path(cls, folder, exclusion): @@ -125,14 +125,14 @@ def folder_exclusion_path(cls, folder, exclusion): def folder_sink_path(cls, folder, sink): 
"""Return a fully-qualified folder_sink string.""" return google.api_core.path_template.expand( - "folders/{folder}/sinks/{sink}", folder=folder, sink=sink + "folders/{folder}/sinks/{sink}", folder=folder, sink=sink, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -157,14 +157,14 @@ def organization_sink_path(cls, organization, sink): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) @classmethod def sink_path(cls, project, sink): """Return a fully-qualified sink string.""" return google.api_core.path_template.expand( - "projects/{project}/sinks/{sink}", project=project, sink=sink + "projects/{project}/sinks/{sink}", project=project, sink=sink, ) def __init__( @@ -254,12 +254,12 @@ def __init__( self.transport = transport else: self.transport = config_service_v2_grpc_transport.ConfigServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -270,7 +270,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -360,7 +360,7 @@ def list_sinks( ) request = logging_config_pb2.ListSinksRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -450,7 +450,7 @@ def get_sink( client_info=self._client_info, ) - request = logging_config_pb2.GetSinkRequest(sink_name=sink_name) + request = logging_config_pb2.GetSinkRequest(sink_name=sink_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -554,7 +554,7 @@ def create_sink( ) request = logging_config_pb2.CreateSinkRequest( - parent=parent, sink=sink, unique_writer_identity=unique_writer_identity + parent=parent, sink=sink, unique_writer_identity=unique_writer_identity, ) if metadata is None: metadata = [] @@ -759,7 +759,7 @@ def delete_sink( client_info=self._client_info, ) - request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name) + request = logging_config_pb2.DeleteSinkRequest(sink_name=sink_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -857,7 +857,7 @@ def list_exclusions( ) request = logging_config_pb2.ListExclusionsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -947,7 +947,7 @@ def get_exclusion( client_info=self._client_info, ) - request = logging_config_pb2.GetExclusionRequest(name=name) + request = logging_config_pb2.GetExclusionRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1038,7 +1038,7 @@ def create_exclusion( ) request = logging_config_pb2.CreateExclusionRequest( - parent=parent, exclusion=exclusion + parent=parent, exclusion=exclusion, ) if metadata is None: metadata = [] @@ -1142,7 +1142,7 @@ def update_exclusion( ) request = logging_config_pb2.UpdateExclusionRequest( - name=name, exclusion=exclusion, update_mask=update_mask + name=name, exclusion=exclusion, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1218,7 +1218,7 @@ def delete_exclusion( 
client_info=self._client_info, ) - request = logging_config_pb2.DeleteExclusionRequest(name=name) + request = logging_config_pb2.DeleteExclusionRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py index 09509318a0a6..778ba747d83c 100644 --- a/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py +++ b/logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py @@ -44,7 +44,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class LoggingServiceV2Client(object): @@ -81,7 +81,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod @@ -96,27 +96,27 @@ def billing_log_path(cls, billing_account, log): @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) @classmethod def folder_log_path(cls, folder, log): """Return a fully-qualified folder_log string.""" return google.api_core.path_template.expand( - "folders/{folder}/logs/{log}", folder=folder, log=log + "folders/{folder}/logs/{log}", folder=folder, log=log, ) @classmethod def log_path(cls, project, log): """Return a fully-qualified log string.""" return google.api_core.path_template.expand( - "projects/{project}/logs/{log}", project=project, log=log + 
"projects/{project}/logs/{log}", project=project, log=log, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -132,7 +132,7 @@ def organization_log_path(cls, organization, log): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__( @@ -222,12 +222,12 @@ def __init__( self.transport = transport else: self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -238,7 +238,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -311,7 +311,7 @@ def delete_log( client_info=self._client_info, ) - request = logging_pb2.DeleteLogRequest(log_name=log_name) + request = logging_pb2.DeleteLogRequest(log_name=log_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -679,7 +679,7 @@ def list_monitored_resource_descriptors( ) request = logging_pb2.ListMonitoredResourceDescriptorsRequest( - page_size=page_size + page_size=page_size, ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, @@ -776,7 +776,7 @@ def list_logs( client_info=self._client_info, ) - request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size) + request = logging_pb2.ListLogsRequest(parent=parent, page_size=page_size,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py index 59dae9c7a78f..278f1365153d 100644 --- a/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py +++ b/logging/google/cloud/logging_v2/gapic/metrics_service_v2_client.py @@ -46,7 +46,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-logging",).version class MetricsServiceV2Client(object): @@ -83,33 +83,33 @@ def from_service_account_file(cls, filename, *args, **kwargs): def billing_path(cls, billing_account): """Return a fully-qualified billing string.""" return google.api_core.path_template.expand( - "billingAccounts/{billing_account}", billing_account=billing_account + "billingAccounts/{billing_account}", billing_account=billing_account, ) @classmethod def folder_path(cls, folder): """Return a fully-qualified folder string.""" - return google.api_core.path_template.expand("folders/{folder}", folder=folder) + return google.api_core.path_template.expand("folders/{folder}", folder=folder,) 
@classmethod def metric_path(cls, project, metric): """Return a fully-qualified metric string.""" return google.api_core.path_template.expand( - "projects/{project}/metrics/{metric}", project=project, metric=metric + "projects/{project}/metrics/{metric}", project=project, metric=metric, ) @classmethod def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) def __init__( @@ -199,12 +199,12 @@ def __init__( self.transport = transport else: self.transport = metrics_service_v2_grpc_transport.MetricsServiceV2GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -215,7 +215,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -302,7 +302,7 @@ def list_log_metrics( ) request = logging_metrics_pb2.ListLogMetricsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -387,7 +387,7 @@ def get_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name) + request = logging_metrics_pb2.GetLogMetricRequest(metric_name=metric_name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -472,7 +472,7 @@ def create_log_metric( ) request = logging_metrics_pb2.CreateLogMetricRequest( - parent=parent, metric=metric + parent=parent, metric=metric, ) if metadata is None: metadata = [] @@ -559,7 +559,7 @@ def update_log_metric( ) request = logging_metrics_pb2.UpdateLogMetricRequest( - metric_name=metric_name, metric=metric + metric_name=metric_name, metric=metric, ) if metadata is None: metadata = [] @@ -630,7 +630,7 @@ def delete_log_metric( client_info=self._client_info, ) - request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name) + request = logging_metrics_pb2.DeleteLogMetricRequest(metric_name=metric_name,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py index 533895087231..b85abcd58a78 100644 --- a/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py +++ b/logging/google/cloud/logging_v2/gapic/transports/config_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. 
@@ -80,7 +80,7 @@ def __init__( self._stubs = { "config_service_v2_stub": logging_config_pb2_grpc.ConfigServiceV2Stub( channel - ) + ), } @classmethod diff --git a/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py index 4477ad701b5e..f6ab3ab8876c 100644 --- a/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py +++ b/logging/google/cloud/logging_v2/gapic/transports/logging_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -78,7 +78,7 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel) + "logging_service_v2_stub": logging_pb2_grpc.LoggingServiceV2Stub(channel), } @classmethod diff --git a/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py b/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py index 426edce6edd7..bc66722729bb 100644 --- a/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py +++ b/logging/google/cloud/logging_v2/gapic/transports/metrics_service_v2_grpc_transport.py @@ -59,7 +59,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. 
@@ -80,7 +80,7 @@ def __init__( self._stubs = { "metrics_service_v2_stub": logging_metrics_pb2_grpc.MetricsServiceV2Stub( channel - ) + ), } @classmethod diff --git a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py b/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py deleted file mode 100644 index 1f2b1ca3b64d..000000000000 --- a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/log_entry_pb2.py +++ /dev/null @@ -1,873 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/logging_v2/proto/log_entry.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.logging.type import ( - http_request_pb2 as google_dot_logging_dot_type_dot_http__request__pb2, -) -from google.logging.type import ( - log_severity_pb2 as google_dot_logging_dot_type_dot_log__severity__pb2, -) -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/log_entry.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - 
"\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\x8e\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 
\x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, - google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, - google_dot_protobuf_dot_any__pb2.DESCRIPTOR, - google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_LOGENTRY_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.logging.v2.LogEntry.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.LogEntry.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.LogEntry.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=1057, - serialized_end=1102, -) - -_LOGENTRY = _descriptor.Descriptor( - name="LogEntry", - full_name="google.logging.v2.LogEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.LogEntry.log_name", - index=0, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.logging.v2.LogEntry.resource", - index=1, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="proto_payload", - full_name="google.logging.v2.LogEntry.proto_payload", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="text_payload", - full_name="google.logging.v2.LogEntry.text_payload", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="json_payload", - full_name="google.logging.v2.LogEntry.json_payload", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - 
default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timestamp", - full_name="google.logging.v2.LogEntry.timestamp", - index=5, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="receive_timestamp", - full_name="google.logging.v2.LogEntry.receive_timestamp", - index=6, - number=24, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="severity", - full_name="google.logging.v2.LogEntry.severity", - index=7, - number=10, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="insert_id", - full_name="google.logging.v2.LogEntry.insert_id", - index=8, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="http_request", - full_name="google.logging.v2.LogEntry.http_request", - index=9, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, 
- extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.logging.v2.LogEntry.labels", - index=10, - number=11, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metadata", - full_name="google.logging.v2.LogEntry.metadata", - index=11, - number=25, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="operation", - full_name="google.logging.v2.LogEntry.operation", - index=12, - number=15, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trace", - full_name="google.logging.v2.LogEntry.trace", - index=13, - number=22, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="span_id", - full_name="google.logging.v2.LogEntry.span_id", - index=14, - number=27, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="trace_sampled", - full_name="google.logging.v2.LogEntry.trace_sampled", - index=15, - number=30, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_location", - full_name="google.logging.v2.LogEntry.source_location", - index=16, - number=23, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGENTRY_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="payload", - full_name="google.logging.v2.LogEntry.payload", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=331, - serialized_end=1113, -) - - -_LOGENTRYOPERATION = _descriptor.Descriptor( - name="LogEntryOperation", - full_name="google.logging.v2.LogEntryOperation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="id", - full_name="google.logging.v2.LogEntryOperation.id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="producer", - full_name="google.logging.v2.LogEntryOperation.producer", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="first", - full_name="google.logging.v2.LogEntryOperation.first", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="last", - full_name="google.logging.v2.LogEntryOperation.last", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1115, - serialized_end=1193, -) - - -_LOGENTRYSOURCELOCATION = _descriptor.Descriptor( - name="LogEntrySourceLocation", - full_name="google.logging.v2.LogEntrySourceLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="file", - full_name="google.logging.v2.LogEntrySourceLocation.file", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="line", - full_name="google.logging.v2.LogEntrySourceLocation.line", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="function", - full_name="google.logging.v2.LogEntrySourceLocation.function", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1195, - serialized_end=1265, -) - -_LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY -_LOGENTRY.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_LOGENTRY.fields_by_name[ - "proto_payload" -].message_type = google_dot_protobuf_dot_any__pb2._ANY -_LOGENTRY.fields_by_name[ - "json_payload" -].message_type = google_dot_protobuf_dot_struct__pb2._STRUCT -_LOGENTRY.fields_by_name[ - "timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name[ - "receive_timestamp" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGENTRY.fields_by_name[ - "severity" -].enum_type = google_dot_logging_dot_type_dot_log__severity__pb2._LOGSEVERITY -_LOGENTRY.fields_by_name[ - "http_request" -].message_type = google_dot_logging_dot_type_dot_http__request__pb2._HTTPREQUEST -_LOGENTRY.fields_by_name["labels"].message_type = _LOGENTRY_LABELSENTRY -_LOGENTRY.fields_by_name[ - "metadata" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEMETADATA -_LOGENTRY.fields_by_name["operation"].message_type = _LOGENTRYOPERATION -_LOGENTRY.fields_by_name["source_location"].message_type = _LOGENTRYSOURCELOCATION -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["proto_payload"] -) 
-_LOGENTRY.fields_by_name["proto_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["text_payload"] -) -_LOGENTRY.fields_by_name["text_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -_LOGENTRY.oneofs_by_name["payload"].fields.append( - _LOGENTRY.fields_by_name["json_payload"] -) -_LOGENTRY.fields_by_name["json_payload"].containing_oneof = _LOGENTRY.oneofs_by_name[ - "payload" -] -DESCRIPTOR.message_types_by_name["LogEntry"] = _LOGENTRY -DESCRIPTOR.message_types_by_name["LogEntryOperation"] = _LOGENTRYOPERATION -DESCRIPTOR.message_types_by_name["LogEntrySourceLocation"] = _LOGENTRYSOURCELOCATION -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogEntry = _reflection.GeneratedProtocolMessageType( - "LogEntry", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRY_LABELSENTRY, - __module__="google.cloud.logging_v2.proto.log_entry_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry.LabelsEntry) - ), - ), - DESCRIPTOR=_LOGENTRY, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""An individual entry in a log. - - - Attributes: - log_name: - Required. The resource name of the log to which this log entry - belongs: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" A project number may - optionally be used in place of PROJECT\_ID. The project number - is translated to its corresponding PROJECT\_ID internally and - the ``log_name`` field will contain PROJECT\_ID in queries and - exports. ``[LOG_ID]`` must be URL-encoded within - ``log_name``. Example: ``"organizations/1234567890/logs/cloudr - esourcemanager.googleapis.com%2Factivity"``. 
``[LOG_ID]`` must - be less than 512 characters long and can only include the - following characters: upper and lower case alphanumeric - characters, forward-slash, underscore, hyphen, and period. - For backward compatibility, if ``log_name`` begins with a - forward-slash, such as ``/projects/...``, then the log entry - is ingested as usual but the forward-slash is removed. Listing - the log entry will not show the leading slash and filtering - for a log name with a leading slash will never return any - results. - resource: - Required. The monitored resource that produced this log entry. - Example: a log entry that reports a database error would be - associated with the monitored resource designating the - particular database that reported the error. - payload: - Optional. The log entry payload, which can be one of multiple - types. - proto_payload: - The log entry payload, represented as a protocol buffer. Some - Google Cloud Platform services use this field for their log - entry payloads. The following protocol buffer types are - supported; user-defined types are not supported: - "type.googleapis.com/google.cloud.audit.AuditLog" - "type.googleapis.com/google.appengine.logging.v1.RequestLog" - text_payload: - The log entry payload, represented as a Unicode string - (UTF-8). - json_payload: - The log entry payload, represented as a structure that is - expressed as a JSON object. - timestamp: - Optional. The time the event described by the log entry - occurred. This time is used to compute the log entry's age and - to enforce the logs retention period. If this field is omitted - in a new log entry, then Logging assigns it the current time. - Timestamps have nanosecond accuracy, but trailing zeros in the - fractional seconds might be omitted when the timestamp is - displayed. Incoming log entries should have timestamps that - are no more than the `logs retention period - `__ in the past, and no more than 24 hours in - the future. 
Log entries outside those time boundaries will not - be available when calling ``entries.list``, but those log - entries can still be `exported with LogSinks - `__. - receive_timestamp: - Output only. The time the log entry was received by Logging. - severity: - Optional. The severity of the log entry. The default value is - ``LogSeverity.DEFAULT``. - insert_id: - Optional. A unique identifier for the log entry. If you - provide a value, then Logging considers other log entries in - the same project, with the same ``timestamp``, and with the - same ``insert_id`` to be duplicates which can be removed. If - omitted in new log entries, then Logging assigns its own - unique identifier. The ``insert_id`` is also used to order log - entries that have the same ``timestamp`` value. - http_request: - Optional. Information about the HTTP request associated with - this log entry, if applicable. - labels: - Optional. A set of user-defined (key, value) data that - provides additional information about the log entry. - metadata: - Deprecated. Output only. Additional metadata about the - monitored resource. Only ``k8s_container``, ``k8s_pod``, and - ``k8s_node`` MonitoredResources have this field populated for - GKE versions older than 1.12.6. For GKE versions 1.12.6 and - above, the ``metadata`` field has been deprecated. The - Kubernetes pod labels that used to be in - ``metadata.userLabels`` will now be present in the ``labels`` - field with a key prefix of ``k8s-pod/``. The Stackdriver - system labels that were present in the - ``metadata.systemLabels`` field will no longer be available in - the LogEntry. - operation: - Optional. Information about an operation associated with the - log entry, if applicable. - trace: - Optional. Resource name of the trace associated with the log - entry, if any. If it contains a relative resource name, the - name is assumed to be relative to - ``//tracing.googleapis.com``. 
Example: ``projects/my- - projectid/traces/06796866738c859f2f19b7cfb3214824`` - span_id: - Optional. The span ID within the trace associated with the log - entry. For Trace spans, this is the same format that the - Trace API v2 uses: a 16-character hexadecimal encoding of an - 8-byte array, such as "000000000000004a". - trace_sampled: - Optional. The sampling decision of the trace associated with - the log entry. True means that the trace resource name in the - ``trace`` field was sampled for storage in a trace backend. - False means that the trace was not sampled for storage when - this log entry was written, or the sampling decision was - unknown at the time. A non-sampled ``trace`` value is still - useful as a request correlation identifier. The default is - False. - source_location: - Optional. Source code location information associated with the - log entry, if any. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntry) - ), -) -_sym_db.RegisterMessage(LogEntry) -_sym_db.RegisterMessage(LogEntry.LabelsEntry) - -LogEntryOperation = _reflection.GeneratedProtocolMessageType( - "LogEntryOperation", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRYOPERATION, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about a potentially long-running operation with - which a log entry is associated. - - - Attributes: - id: - Optional. An arbitrary operation identifier. Log entries with - the same identifier are assumed to be part of the same - operation. - producer: - Optional. An arbitrary producer identifier. The combination of - ``id`` and ``producer`` must be globally unique. Examples for - ``producer``: ``"MyDivision.MyBigCompany.com"``, - ``"github.com/MyProject/MyApplication"``. - first: - Optional. Set this to True if this is the first log entry in - the operation. - last: - Optional. Set this to True if this is the last log entry in - the operation. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntryOperation) - ), -) -_sym_db.RegisterMessage(LogEntryOperation) - -LogEntrySourceLocation = _reflection.GeneratedProtocolMessageType( - "LogEntrySourceLocation", - (_message.Message,), - dict( - DESCRIPTOR=_LOGENTRYSOURCELOCATION, - __module__="google.cloud.logging_v2.proto.log_entry_pb2", - __doc__="""Additional information about the source code location that produced the - log entry. - - - Attributes: - file: - Optional. Source file name. Depending on the runtime - environment, this might be a simple name or a fully-qualified - name. - line: - Optional. Line within the source file. 1-based; 0 indicates no - line number available. - function: - Optional. Human-readable name of the function or method being - invoked, with optional context such as the class or package - name. This information may be used in contexts such as the - logs viewer, where a file and line number are less meaningful. - The format can vary by language. For example: - ``qual.if.ied.Class.method`` (Java), ``dir/package.func`` - (Go), ``function`` (Python). - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogEntrySourceLocation) - ), -) -_sym_db.RegisterMessage(LogEntrySourceLocation) - - -DESCRIPTOR._options = None -_LOGENTRY_LABELSENTRY._options = None -_LOGENTRY.fields_by_name["metadata"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py b/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py deleted file mode 100644 index 144591e49189..000000000000 --- a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_config_pb2.py +++ /dev/null @@ -1,1857 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/logging_v2/proto/logging_config.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/logging_config.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\xaa\x03\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t 
\x01(\x08\x12>\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=p
rojects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}
:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_LOGSINK_VERSIONFORMAT = _descriptor.EnumDescriptor( - name="VersionFormat", - full_name="google.logging.v2.LogSink.VersionFormat", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="VERSION_FORMAT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="V2", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="V1", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=609, - serialized_end=672, -) -_sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) - - -_LOGSINK = _descriptor.Descriptor( - name="LogSink", - 
full_name="google.logging.v2.LogSink", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogSink.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="destination", - full_name="google.logging.v2.LogSink.destination", - index=1, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogSink.filter", - index=2, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="output_version_format", - full_name="google.logging.v2.LogSink.output_version_format", - index=3, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="writer_identity", - full_name="google.logging.v2.LogSink.writer_identity", - index=4, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="include_children", - full_name="google.logging.v2.LogSink.include_children", - index=5, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bigquery_options", - full_name="google.logging.v2.LogSink.bigquery_options", - index=6, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogSink.create_time", - index=7, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogSink.update_time", - index=8, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_LOGSINK_VERSIONFORMAT], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="options", - full_name="google.logging.v2.LogSink.options", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=257, - serialized_end=683, -) - - 
-_BIGQUERYOPTIONS = _descriptor.Descriptor( - name="BigQueryOptions", - full_name="google.logging.v2.BigQueryOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="use_partitioned_tables", - full_name="google.logging.v2.BigQueryOptions.use_partitioned_tables", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=685, - serialized_end=734, -) - - -_LISTSINKSREQUEST = _descriptor.Descriptor( - name="ListSinksRequest", - full_name="google.logging.v2.ListSinksRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListSinksRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListSinksRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListSinksRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - 
default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=736, - serialized_end=809, -) - - -_LISTSINKSRESPONSE = _descriptor.Descriptor( - name="ListSinksResponse", - full_name="google.logging.v2.ListSinksResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sinks", - full_name="google.logging.v2.ListSinksResponse.sinks", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListSinksResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=811, - serialized_end=898, -) - - -_GETSINKREQUEST = _descriptor.Descriptor( - name="GetSinkRequest", - full_name="google.logging.v2.GetSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.GetSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=900, - serialized_end=935, -) - - -_CREATESINKREQUEST = _descriptor.Descriptor( - name="CreateSinkRequest", - full_name="google.logging.v2.CreateSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateSinkRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sink", - full_name="google.logging.v2.CreateSinkRequest.sink", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unique_writer_identity", - full_name="google.logging.v2.CreateSinkRequest.unique_writer_identity", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=937, - serialized_end=1046, -) - - 
-_UPDATESINKREQUEST = _descriptor.Descriptor( - name="UpdateSinkRequest", - full_name="google.logging.v2.UpdateSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.UpdateSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sink", - full_name="google.logging.v2.UpdateSinkRequest.sink", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unique_writer_identity", - full_name="google.logging.v2.UpdateSinkRequest.unique_writer_identity", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateSinkRequest.update_mask", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1049, - serialized_end=1210, -) - - -_DELETESINKREQUEST = 
_descriptor.Descriptor( - name="DeleteSinkRequest", - full_name="google.logging.v2.DeleteSinkRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sink_name", - full_name="google.logging.v2.DeleteSinkRequest.sink_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1212, - serialized_end=1250, -) - - -_LOGEXCLUSION = _descriptor.Descriptor( - name="LogExclusion", - full_name="google.logging.v2.LogExclusion", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogExclusion.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogExclusion.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogExclusion.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="disabled", - full_name="google.logging.v2.LogExclusion.disabled", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogExclusion.create_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogExclusion.update_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1253, - serialized_end=1434, -) - - -_LISTEXCLUSIONSREQUEST = _descriptor.Descriptor( - name="ListExclusionsRequest", - full_name="google.logging.v2.ListExclusionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListExclusionsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListExclusionsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListExclusionsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1436, - serialized_end=1514, -) - - -_LISTEXCLUSIONSRESPONSE = _descriptor.Descriptor( - name="ListExclusionsResponse", - full_name="google.logging.v2.ListExclusionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="exclusions", - full_name="google.logging.v2.ListExclusionsResponse.exclusions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListExclusionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1516, - serialized_end=1618, -) - - -_GETEXCLUSIONREQUEST = _descriptor.Descriptor( - name="GetExclusionRequest", - full_name="google.logging.v2.GetExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.GetExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1620, - serialized_end=1655, -) - - -_CREATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="CreateExclusionRequest", - full_name="google.logging.v2.CreateExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateExclusionRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion", - full_name="google.logging.v2.CreateExclusionRequest.exclusion", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, 
- enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1657, - serialized_end=1749, -) - - -_UPDATEEXCLUSIONREQUEST = _descriptor.Descriptor( - name="UpdateExclusionRequest", - full_name="google.logging.v2.UpdateExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.UpdateExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion", - full_name="google.logging.v2.UpdateExclusionRequest.exclusion", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.logging.v2.UpdateExclusionRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1752, - serialized_end=1891, -) - - -_DELETEEXCLUSIONREQUEST = 
_descriptor.Descriptor( - name="DeleteExclusionRequest", - full_name="google.logging.v2.DeleteExclusionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.DeleteExclusionRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1893, - serialized_end=1931, -) - -_LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT -_LOGSINK.fields_by_name["bigquery_options"].message_type = _BIGQUERYOPTIONS -_LOGSINK.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGSINK_VERSIONFORMAT.containing_type = _LOGSINK -_LOGSINK.oneofs_by_name["options"].fields.append( - _LOGSINK.fields_by_name["bigquery_options"] -) -_LOGSINK.fields_by_name["bigquery_options"].containing_oneof = _LOGSINK.oneofs_by_name[ - "options" -] -_LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK -_CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK -_UPDATESINKREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LOGEXCLUSION.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGEXCLUSION.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP 
-_LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION -_CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION -_UPDATEEXCLUSIONREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK -DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS -DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST -DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE -DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST -DESCRIPTOR.message_types_by_name["CreateSinkRequest"] = _CREATESINKREQUEST -DESCRIPTOR.message_types_by_name["UpdateSinkRequest"] = _UPDATESINKREQUEST -DESCRIPTOR.message_types_by_name["DeleteSinkRequest"] = _DELETESINKREQUEST -DESCRIPTOR.message_types_by_name["LogExclusion"] = _LOGEXCLUSION -DESCRIPTOR.message_types_by_name["ListExclusionsRequest"] = _LISTEXCLUSIONSREQUEST -DESCRIPTOR.message_types_by_name["ListExclusionsResponse"] = _LISTEXCLUSIONSRESPONSE -DESCRIPTOR.message_types_by_name["GetExclusionRequest"] = _GETEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["CreateExclusionRequest"] = _CREATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["UpdateExclusionRequest"] = _UPDATEEXCLUSIONREQUEST -DESCRIPTOR.message_types_by_name["DeleteExclusionRequest"] = _DELETEEXCLUSIONREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogSink = _reflection.GeneratedProtocolMessageType( - "LogSink", - (_message.Message,), - dict( - DESCRIPTOR=_LOGSINK, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Describes a sink used to export log entries to one of the following - destinations in any project: a Cloud Storage bucket, a BigQuery dataset, - or a Cloud Pub/Sub topic. A logs filter controls which log entries are - exported. 
The sink must be created within a project, organization, - billing account, or folder. - - - Attributes: - name: - Required. The client-assigned sink identifier, unique within - the project. Example: ``"my-syslog-errors-to-pubsub"``. Sink - identifiers are limited to 100 characters and can include only - the following characters: upper and lower-case alphanumeric - characters, underscores, hyphens, and periods. - destination: - Required. The export destination: :: - "storage.googleapis.com/[GCS_BUCKET]" "bigquery.googleapis - .com/projects/[PROJECT_ID]/datasets/[DATASET]" "pubsub.goo - gleapis.com/projects/[PROJECT_ID]/topics/[TOPIC_ID]" The - sink's ``writer_identity``, set when the sink is created, must - have permission to write to the destination or else the log - entries are not exported. For more information, see `Exporting - Logs with Sinks `__. - filter: - Optional. An `advanced logs filter - `__. The only exported - log entries are those that are in the resource owning the sink - and that match the filter. For example: :: - logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND - severity>=ERROR - output_version_format: - Deprecated. The log entry format to use for this sink's - exported log entries. The v2 format is used by default and - cannot be changed. - writer_identity: - Output only. An IAM identity—a service account or group—under - which Logging writes the exported log entries to the sink's - destination. This field is set by - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - and - [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] - based on the value of ``unique_writer_identity`` in those - methods. Until you grant this identity write-access to the - destination, log entry exports from this sink will fail. For - more information, see `Granting Access for a Resource - `__. Consult the - destination service's documentation to determine the - appropriate IAM roles to assign to the identity. - include_children: - Optional. 
This field applies only to sinks owned by - organizations and folders. If the field is false, the default, - only the logs owned by the sink's parent resource are - available for export. If the field is true, then logs from all - the projects, folders, and billing accounts contained in the - sink's parent resource are also available for export. Whether - a particular log entry from the children is exported depends - on the sink's filter expression. For example, if this field is - true, then the filter ``resource.type=gce_instance`` would - export all Compute Engine VM instance log entries from all - projects in the sink's parent. To only export entries from - certain child projects, filter on the project part of the log - name: :: logName:("projects/test-project1/" OR - "projects/test-project2/") AND resource.type=gce_instance - options: - Optional. Destination dependent options. - bigquery_options: - Optional. Options that affect sinks exporting data to - BigQuery. - create_time: - Output only. The creation timestamp of the sink. This field - may not be present for older sinks. - update_time: - Output only. The last update timestamp of the sink. This - field may not be present for older sinks. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) - ), -) -_sym_db.RegisterMessage(LogSink) - -BigQueryOptions = _reflection.GeneratedProtocolMessageType( - "BigQueryOptions", - (_message.Message,), - dict( - DESCRIPTOR=_BIGQUERYOPTIONS, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Options that change functionality of a sink exporting data to BigQuery. - - - Attributes: - use_partitioned_tables: - Optional. Whether to use `BigQuery's partition tables - `__. By default, Logging - creates dated tables based on the log entries' timestamps, - e.g. syslog\_20170523. With partitioned tables the date suffix - is no longer present and `special query syntax - `__ has to be used - instead. 
In both cases, tables are sharded based on UTC - timezone. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) - ), -) -_sym_db.RegisterMessage(BigQueryOptions) - -ListSinksRequest = _reflection.GeneratedProtocolMessageType( - "ListSinksRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSINKSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``ListSinks``. - - - Attributes: - parent: - Required. The parent resource whose sinks are to be listed: - :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksRequest) - ), -) -_sym_db.RegisterMessage(ListSinksRequest) - -ListSinksResponse = _reflection.GeneratedProtocolMessageType( - "ListSinksResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTSINKSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Result returned from ``ListSinks``. - - - Attributes: - sinks: - A list of sinks. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call the same method again using the value of - ``nextPageToken`` as ``pageToken``. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListSinksResponse) - ), -) -_sym_db.RegisterMessage(ListSinksResponse) - -GetSinkRequest = _reflection.GeneratedProtocolMessageType( - "GetSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETSINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``GetSink``. - - - Attributes: - sink_name: - Required. The resource name of the sink: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetSinkRequest) - ), -) -_sym_db.RegisterMessage(GetSinkRequest) - -CreateSinkRequest = _reflection.GeneratedProtocolMessageType( - "CreateSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``CreateSink``. - - - Attributes: - parent: - Required. The resource in which to create the sink: :: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Examples: ``"projects/my-logging- - project"``, ``"organizations/123456789"``. - sink: - Required. The new sink, whose ``name`` parameter is a sink - identifier that is not already in use. - unique_writer_identity: - Optional. Determines the kind of IAM identity returned as - ``writer_identity`` in the new sink. If this value is omitted - or set to false, and if the sink's parent is a project, then - the value returned as ``writer_identity`` is the same group or - service account used by Logging before the addition of writer - identities to this API. The sink's destination must be in the - same project as the sink itself. 
If this field is set to - true, or if the sink is owned by a non-project resource such - as an organization, then the value of ``writer_identity`` will - be a unique service account used only for exports from the new - sink. For more information, see ``writer_identity`` in - [LogSink][google.logging.v2.LogSink]. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateSinkRequest) - ), -) -_sym_db.RegisterMessage(CreateSinkRequest) - -UpdateSinkRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``UpdateSink``. - - - Attributes: - sink_name: - Required. The full resource name of the sink to update, - including the parent resource and the sink identifier: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - sink: - Required. The updated sink, whose name is the same identifier - that appears as part of ``sink_name``. - unique_writer_identity: - Optional. See - [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] - for a description of this field. When updating a sink, the - effect of this field on the value of ``writer_identity`` in - the updated sink depends on both the old and new values of - this field: - If the old and new values of this field are - both false or both true, then there is no change to the - sink's ``writer_identity``. - If the old value is false and - the new value is true, then ``writer_identity`` is changed - to a unique service account. - It is an error if the old - value is true and the new value is set to false or - defaulted to false. - update_mask: - Optional. Field mask that specifies the fields in ``sink`` - that need an update. 
A sink field will be overwritten if, and - only if, it is in the update mask. ``name`` and output only - fields cannot be updated. An empty updateMask is temporarily - treated as using the following mask for backwards - compatibility purposes: destination,filter,includeChildren At - some point in the future, behavior will be removed and - specifying an empty updateMask will be an error. For a - detailed ``FieldMask`` definition, see - https://developers.google.com/protocol-buffers/docs/reference/ - google.protobuf#google.protobuf.FieldMask Example: - ``updateMask=filter``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateSinkRequest) - ), -) -_sym_db.RegisterMessage(UpdateSinkRequest) - -DeleteSinkRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSinkRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETESINKREQUEST, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""The parameters to ``DeleteSink``. - - - Attributes: - sink_name: - Required. The full resource name of the sink to delete, - including the parent resource and the sink identifier: :: - "projects/[PROJECT_ID]/sinks/[SINK_ID]" - "organizations/[ORGANIZATION_ID]/sinks/[SINK_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/sinks/[SINK_ID]" - "folders/[FOLDER_ID]/sinks/[SINK_ID]" Example: - ``"projects/my-project-id/sinks/my-sink-id"``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteSinkRequest) - ), -) -_sym_db.RegisterMessage(DeleteSinkRequest) - -LogExclusion = _reflection.GeneratedProtocolMessageType( - "LogExclusion", - (_message.Message,), - dict( - DESCRIPTOR=_LOGEXCLUSION, - __module__="google.cloud.logging_v2.proto.logging_config_pb2", - __doc__="""Specifies a set of log entries that are not to be stored in Logging. If - your GCP resource receives a large volume of logs, you can use - exclusions to reduce your chargeable logs. 
Exclusions are processed - after log sinks, so you can export log entries before they are excluded. - Note that organization-level and folder-level exclusions don't apply to - child resources, and that you can't exclude audit log entries. - - - Attributes: - name: - Required. A client-assigned identifier, such as ``"load- - balancer-exclusion"``. Identifiers are limited to 100 - characters and can include only letters, digits, underscores, - hyphens, and periods. - description: - Optional. A description of this exclusion. - filter: - Required. An `advanced logs filter - `__ that matches the log - entries to be excluded. By using the `sample function - `__, you can - exclude less than 100% of the matching log entries. For - example, the following query matches 99% of low-severity log - entries from Google Cloud Storage buckets: - ``"resource.type=gcs_bucket severity\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 
\x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xe4\x07\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_distribution__pb2.DESCRIPTOR, - google_dot_api_dot_metric__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - 
-_LOGMETRIC_APIVERSION = _descriptor.EnumDescriptor( - name="ApiVersion", - full_name="google.logging.v2.LogMetric.ApiVersion", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="V2", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="V1", index=1, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=785, - serialized_end=813, -) -_sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) - - -_LOGMETRIC_LABELEXTRACTORSENTRY = _descriptor.Descriptor( - name="LabelExtractorsEntry", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.LogMetric.LabelExtractorsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=729, - serialized_end=783, -) - -_LOGMETRIC = _descriptor.Descriptor( - name="LogMetric", - full_name="google.logging.v2.LogMetric", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="name", - full_name="google.logging.v2.LogMetric.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.logging.v2.LogMetric.description", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.logging.v2.LogMetric.filter", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric_descriptor", - full_name="google.logging.v2.LogMetric.metric_descriptor", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value_extractor", - full_name="google.logging.v2.LogMetric.value_extractor", - index=4, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="label_extractors", - full_name="google.logging.v2.LogMetric.label_extractors", - index=5, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_options", - full_name="google.logging.v2.LogMetric.bucket_options", - index=6, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.logging.v2.LogMetric.create_time", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.logging.v2.LogMetric.update_time", - index=8, - number=10, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.logging.v2.LogMetric.version", - index=9, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY], - enum_types=[_LOGMETRIC_APIVERSION], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=282, - serialized_end=813, -) - - -_LISTLOGMETRICSREQUEST = _descriptor.Descriptor( - name="ListLogMetricsRequest", - full_name="google.logging.v2.ListLogMetricsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListLogMetricsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogMetricsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogMetricsRequest.page_size", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=815, - serialized_end=893, -) - - -_LISTLOGMETRICSRESPONSE = _descriptor.Descriptor( - name="ListLogMetricsResponse", - full_name="google.logging.v2.ListLogMetricsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( 
- name="metrics", - full_name="google.logging.v2.ListLogMetricsResponse.metrics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogMetricsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=895, - serialized_end=991, -) - - -_GETLOGMETRICREQUEST = _descriptor.Descriptor( - name="GetLogMetricRequest", - full_name="google.logging.v2.GetLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.GetLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=993, - serialized_end=1035, -) - - -_CREATELOGMETRICREQUEST = _descriptor.Descriptor( - name="CreateLogMetricRequest", - full_name="google.logging.v2.CreateLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, 
- fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.CreateLogMetricRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric", - full_name="google.logging.v2.CreateLogMetricRequest.metric", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1037, - serialized_end=1123, -) - - -_UPDATELOGMETRICREQUEST = _descriptor.Descriptor( - name="UpdateLogMetricRequest", - full_name="google.logging.v2.UpdateLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.UpdateLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metric", - full_name="google.logging.v2.UpdateLogMetricRequest.metric", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1125, - serialized_end=1216, -) - - -_DELETELOGMETRICREQUEST = _descriptor.Descriptor( - name="DeleteLogMetricRequest", - full_name="google.logging.v2.DeleteLogMetricRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="metric_name", - full_name="google.logging.v2.DeleteLogMetricRequest.metric_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1218, - serialized_end=1263, -) - -_LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC -_LOGMETRIC.fields_by_name[ - "metric_descriptor" -].message_type = google_dot_api_dot_metric__pb2._METRICDESCRIPTOR -_LOGMETRIC.fields_by_name[ - "label_extractors" -].message_type = _LOGMETRIC_LABELEXTRACTORSENTRY -_LOGMETRIC.fields_by_name[ - "bucket_options" -].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS -_LOGMETRIC.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGMETRIC.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION -_LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC -_LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC -_CREATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC 
-_UPDATELOGMETRICREQUEST.fields_by_name["metric"].message_type = _LOGMETRIC -DESCRIPTOR.message_types_by_name["LogMetric"] = _LOGMETRIC -DESCRIPTOR.message_types_by_name["ListLogMetricsRequest"] = _LISTLOGMETRICSREQUEST -DESCRIPTOR.message_types_by_name["ListLogMetricsResponse"] = _LISTLOGMETRICSRESPONSE -DESCRIPTOR.message_types_by_name["GetLogMetricRequest"] = _GETLOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["CreateLogMetricRequest"] = _CREATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["UpdateLogMetricRequest"] = _UPDATELOGMETRICREQUEST -DESCRIPTOR.message_types_by_name["DeleteLogMetricRequest"] = _DELETELOGMETRICREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LogMetric = _reflection.GeneratedProtocolMessageType( - "LogMetric", - (_message.Message,), - dict( - LabelExtractorsEntry=_reflection.GeneratedProtocolMessageType( - "LabelExtractorsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGMETRIC_LABELEXTRACTORSENTRY, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric.LabelExtractorsEntry) - ), - ), - DESCRIPTOR=_LOGMETRIC, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Describes a logs-based metric. The value of the metric is the number of - log entries that match a logs filter in a given time interval. - - Logs-based metric can also be used to extract values from logs and - create a a distribution of the values. The distribution records the - statistics of the extracted values along with an optional histogram of - the values as specified by the bucket options. - - - Attributes: - name: - Required. The client-assigned metric identifier. Examples: - ``"error_count"``, ``"nginx/requests"``. Metric identifiers - are limited to 100 characters and can include only the - following characters: ``A-Z``, ``a-z``, ``0-9``, and the - special characters ``_-.,+!*',()%/``. 
The forward-slash - character (``/``) denotes a hierarchy of name pieces, and it - cannot be the first character of the name. The metric - identifier in this field must not be `URL-encoded - `__. However, - when the metric identifier appears as the ``[METRIC_ID]`` part - of a ``metric_name`` API parameter, then the metric identifier - must be URL-encoded. Example: ``"projects/my- - project/metrics/nginx%2Frequests"``. - description: - Optional. A description of this metric, which is used in - documentation. The maximum length of the description is 8000 - characters. - filter: - Required. An `advanced logs filter - `__ which is used to - match log entries. Example: :: "resource.type=gae_app - AND severity>=ERROR" The maximum length of the filter is - 20000 characters. - metric_descriptor: - Optional. The metric descriptor associated with the logs-based - metric. If unspecified, it uses a default metric descriptor - with a DELTA metric kind, INT64 value type, with no labels and - a unit of "1". Such a metric counts the number of log entries - matching the ``filter`` expression. The ``name``, ``type``, - and ``description`` fields in the ``metric_descriptor`` are - output only, and is constructed using the ``name`` and - ``description`` field in the LogMetric. To create a logs- - based metric that records a distribution of log values, a - DELTA metric kind with a DISTRIBUTION value type must be used - along with a ``value_extractor`` expression in the LogMetric. - Each label in the metric descriptor must have a matching label - name as the key and an extractor expression as the value in - the ``label_extractors`` map. The ``metric_kind`` and - ``value_type`` fields in the ``metric_descriptor`` cannot be - updated once initially configured. New labels can be added in - the ``metric_descriptor``, but existing labels cannot be - modified except for their description. - value_extractor: - Optional. 
A ``value_extractor`` is required when using a - distribution logs-based metric to extract the values to record - from a log entry. Two functions are supported for value - extraction: ``EXTRACT(field)`` or ``REGEXP_EXTRACT(field, - regex)``. The argument are: 1. field: The name of the log - entry field from which the value is to be extracted. 2. regex: - A regular expression using the Google RE2 syntax - (https://github.com/google/re2/wiki/Syntax) with a single - capture group to extract data from the specified log entry - field. The value of the field is converted to a string before - applying the regex. It is an error to specify a regex that - does not include exactly one capture group. The result of the - extraction must be convertible to a double type, as the - distribution always records double values. If either the - extraction or the conversion to double fails, then those - values are not recorded in the distribution. Example: - ``REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`` - label_extractors: - Optional. A map from a label key string to an extractor - expression which is used to extract data from a log entry - field and assign as the label value. Each label key specified - in the LabelDescriptor must have an associated extractor - expression in this map. The syntax of the extractor expression - is the same as for the ``value_extractor`` field. The - extracted value is converted to the type defined in the label - descriptor. If the either the extraction or the type - conversion fails, the label will have a default value. The - default value for a string label is an empty string, for an - integer label its 0, and for a boolean label its ``false``. - Note that there are upper bounds on the maximum number of - labels and the number of active time series that are allowed - in a project. - bucket_options: - Optional. 
The ``bucket_options`` are required when the logs- - based metric is using a DISTRIBUTION value type and it - describes the bucket boundaries used to create a histogram of - the extracted values. - create_time: - Output only. The creation timestamp of the metric. This field - may not be present for older metrics. - update_time: - Output only. The last update timestamp of the metric. This - field may not be present for older metrics. - version: - Deprecated. The API version that created or updated this - metric. The v2 format is used by default and cannot be - changed. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.LogMetric) - ), -) -_sym_db.RegisterMessage(LogMetric) -_sym_db.RegisterMessage(LogMetric.LabelExtractorsEntry) - -ListLogMetricsRequest = _reflection.GeneratedProtocolMessageType( - "ListLogMetricsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGMETRICSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to ListLogMetrics. - - - Attributes: - parent: - Required. The name of the project containing the metrics: :: - "projects/[PROJECT_ID]" - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsRequest) - ), -) -_sym_db.RegisterMessage(ListLogMetricsRequest) - -ListLogMetricsResponse = _reflection.GeneratedProtocolMessageType( - "ListLogMetricsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGMETRICSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""Result returned from ListLogMetrics. - - - Attributes: - metrics: - A list of logs-based metrics. - next_page_token: - If there might be more results than appear in this response, - then ``nextPageToken`` is included. To get the next set of - results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogMetricsResponse) - ), -) -_sym_db.RegisterMessage(ListLogMetricsResponse) - -GetLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "GetLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETLOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to GetLogMetric. - - - Attributes: - metric_name: - The resource name of the desired metric: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.GetLogMetricRequest) - ), -) -_sym_db.RegisterMessage(GetLogMetricRequest) - -CreateLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "CreateLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_CREATELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to CreateLogMetric. - - - Attributes: - parent: - The resource name of the project in which to create the - metric: :: "projects/[PROJECT_ID]" The new metric must - be provided in the request. - metric: - The new logs-based metric, which must not have an identifier - that already exists. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.CreateLogMetricRequest) - ), -) -_sym_db.RegisterMessage(CreateLogMetricRequest) - -UpdateLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "UpdateLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to UpdateLogMetric. - - - Attributes: - metric_name: - The resource name of the metric to update: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" The updated - metric must be provided in the request and it's ``name`` field - must be the same as ``[METRIC_ID]`` If the metric does not - exist in ``[PROJECT_ID]``, then a new metric is created. - metric: - The updated metric. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.UpdateLogMetricRequest) - ), -) -_sym_db.RegisterMessage(UpdateLogMetricRequest) - -DeleteLogMetricRequest = _reflection.GeneratedProtocolMessageType( - "DeleteLogMetricRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETELOGMETRICREQUEST, - __module__="google.cloud.logging_v2.proto.logging_metrics_pb2", - __doc__="""The parameters to DeleteLogMetric. 
- - - Attributes: - metric_name: - The resource name of the metric to delete: :: - "projects/[PROJECT_ID]/metrics/[METRIC_ID]" - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogMetricRequest) - ), -) -_sym_db.RegisterMessage(DeleteLogMetricRequest) - - -DESCRIPTOR._options = None -_LOGMETRIC_LABELEXTRACTORSENTRY._options = None -_LOGMETRIC.fields_by_name["version"]._options = None - -_METRICSSERVICEV2 = _descriptor.ServiceDescriptor( - name="MetricsServiceV2", - full_name="google.logging.v2.MetricsServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" - ), - serialized_start=1266, - serialized_end=2262, - methods=[ - _descriptor.MethodDescriptor( - name="ListLogMetrics", - full_name="google.logging.v2.MetricsServiceV2.ListLogMetrics", - index=0, - containing_service=None, - input_type=_LISTLOGMETRICSREQUEST, - output_type=_LISTLOGMETRICSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002!\022\037/v2/{parent=projects/*}/metrics" - ), - ), - _descriptor.MethodDescriptor( - name="GetLogMetric", - full_name="google.logging.v2.MetricsServiceV2.GetLogMetric", - index=1, - containing_service=None, - input_type=_GETLOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - "\202\323\344\223\002(\022&/v2/{metric_name=projects/*/metrics/*}" - ), - ), - _descriptor.MethodDescriptor( - name="CreateLogMetric", - full_name="google.logging.v2.MetricsServiceV2.CreateLogMetric", - index=2, - containing_service=None, - input_type=_CREATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - '\202\323\344\223\002)"\037/v2/{parent=projects/*}/metrics:\006metric' - ), - ), - _descriptor.MethodDescriptor( - name="UpdateLogMetric", - 
full_name="google.logging.v2.MetricsServiceV2.UpdateLogMetric", - index=3, - containing_service=None, - input_type=_UPDATELOGMETRICREQUEST, - output_type=_LOGMETRIC, - serialized_options=_b( - "\202\323\344\223\0020\032&/v2/{metric_name=projects/*/metrics/*}:\006metric" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteLogMetric", - full_name="google.logging.v2.MetricsServiceV2.DeleteLogMetric", - index=4, - containing_service=None, - input_type=_DELETELOGMETRICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002(*&/v2/{metric_name=projects/*/metrics/*}" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_METRICSSERVICEV2) - -DESCRIPTOR.services_by_name["MetricsServiceV2"] = _METRICSSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py deleted file mode 100644 index 09f84e038a1b..000000000000 --- a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_metrics_pb2_grpc.py +++ /dev/null @@ -1,118 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.logging_v2.proto import ( - logging_metrics_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class MetricsServiceV2Stub(object): - """Service for configuring logs-based metrics. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ListLogMetrics = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/ListLogMetrics", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.FromString, - ) - self.GetLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/GetLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.CreateLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/CreateLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.UpdateLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/UpdateLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.FromString, - ) - self.DeleteLogMetric = channel.unary_unary( - "/google.logging.v2.MetricsServiceV2/DeleteLogMetric", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class MetricsServiceV2Servicer(object): - """Service for configuring logs-based metrics. - """ - - def ListLogMetrics(self, request, context): - """Lists logs-based metrics. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetLogMetric(self, request, context): - """Gets a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateLogMetric(self, request, context): - """Creates a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateLogMetric(self, request, context): - """Creates or updates a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteLogMetric(self, request, context): - """Deletes a logs-based metric. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_MetricsServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "ListLogMetrics": grpc.unary_unary_rpc_method_handler( - servicer.ListLogMetrics, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.ListLogMetricsResponse.SerializeToString, - ), - "GetLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.GetLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.GetLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "CreateLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.CreateLogMetric, - 
request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.CreateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "UpdateLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.UpdateLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.UpdateLogMetricRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.LogMetric.SerializeToString, - ), - "DeleteLogMetric": grpc.unary_unary_rpc_method_handler( - servicer.DeleteLogMetric, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__metrics__pb2.DeleteLogMetricRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.logging.v2.MetricsServiceV2", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py b/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py deleted file mode 100644 index 04bd84375901..000000000000 --- a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2.py +++ /dev/null @@ -1,1312 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/logging_v2/proto/logging.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import ( - monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, -) -from google.cloud.logging_v2.proto import ( - log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, -) -from google.cloud.logging_v2.proto import ( - logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/logging_v2/proto/logging.proto", - package="google.logging.v2", - syntax="proto3", - serialized_options=_b( - "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" - ), - serialized_pb=_b( - 
'\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 
\x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x85\n\n\x10LoggingServiceV2\x12\x88\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xbd\x01\x82\xd3\xe4\x93\x02\xb6\x01* /v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 
/v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, - google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_DELETELOGREQUEST = _descriptor.Descriptor( - name="DeleteLogRequest", - full_name="google.logging.v2.DeleteLogRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.DeleteLogRequest.log_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=376, - serialized_end=412, -) - - -_WRITELOGENTRIESREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.WriteLogEntriesRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=667, - serialized_end=712, -) - -_WRITELOGENTRIESREQUEST = _descriptor.Descriptor( - name="WriteLogEntriesRequest", - full_name="google.logging.v2.WriteLogEntriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_name", - full_name="google.logging.v2.WriteLogEntriesRequest.log_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource", - full_name="google.logging.v2.WriteLogEntriesRequest.resource", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.logging.v2.WriteLogEntriesRequest.labels", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="entries", - full_name="google.logging.v2.WriteLogEntriesRequest.entries", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="partial_success", - full_name="google.logging.v2.WriteLogEntriesRequest.partial_success", - index=4, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dry_run", - full_name="google.logging.v2.WriteLogEntriesRequest.dry_run", - index=5, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=415, - serialized_end=712, -) - - -_WRITELOGENTRIESRESPONSE = _descriptor.Descriptor( - name="WriteLogEntriesResponse", - full_name="google.logging.v2.WriteLogEntriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=714, - serialized_end=739, -) - - -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY = _descriptor.Descriptor( - name="LogEntryErrorsEntry", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.key", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - 
serialized_end=942, -) - -_WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( - name="WriteLogEntriesPartialErrors", - full_name="google.logging.v2.WriteLogEntriesPartialErrors", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_entry_errors", - full_name="google.logging.v2.WriteLogEntriesPartialErrors.log_entry_errors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=742, - serialized_end=942, -) - - -_LISTLOGENTRIESREQUEST = _descriptor.Descriptor( - name="ListLogEntriesRequest", - full_name="google.logging.v2.ListLogEntriesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_ids", - full_name="google.logging.v2.ListLogEntriesRequest.project_ids", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=_b("\030\001"), - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_names", - full_name="google.logging.v2.ListLogEntriesRequest.resource_names", - index=1, - number=8, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - 
full_name="google.logging.v2.ListLogEntriesRequest.filter", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="order_by", - full_name="google.logging.v2.ListLogEntriesRequest.order_by", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogEntriesRequest.page_size", - index=4, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogEntriesRequest.page_token", - index=5, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=945, - serialized_end=1090, -) - - -_LISTLOGENTRIESRESPONSE = _descriptor.Descriptor( - name="ListLogEntriesResponse", - full_name="google.logging.v2.ListLogEntriesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name="entries", - full_name="google.logging.v2.ListLogEntriesResponse.entries", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogEntriesResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1092, - serialized_end=1187, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsRequest", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_size", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsRequest.page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1189, - serialized_end=1269, -) - - -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE = _descriptor.Descriptor( - name="ListMonitoredResourceDescriptorsResponse", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="resource_descriptors", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.resource_descriptors", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListMonitoredResourceDescriptorsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1272, - serialized_end=1410, -) - - -_LISTLOGSREQUEST = _descriptor.Descriptor( - name="ListLogsRequest", - full_name="google.logging.v2.ListLogsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.logging.v2.ListLogsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.logging.v2.ListLogsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.logging.v2.ListLogsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1412, - serialized_end=1484, -) - - -_LISTLOGSRESPONSE = _descriptor.Descriptor( - name="ListLogsResponse", - full_name="google.logging.v2.ListLogsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="log_names", - full_name="google.logging.v2.ListLogsResponse.log_names", - index=0, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.logging.v2.ListLogsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1486, - serialized_end=1548, -) - -_WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST -_WRITELOGENTRIESREQUEST.fields_by_name[ - "resource" -].message_type = google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCE -_WRITELOGENTRIESREQUEST.fields_by_name[ - "labels" -].message_type = _WRITELOGENTRIESREQUEST_LABELSENTRY -_WRITELOGENTRIESREQUEST.fields_by_name[ - "entries" -].message_type = ( - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -) -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.fields_by_name[ - "value" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY.containing_type = ( - _WRITELOGENTRIESPARTIALERRORS -) -_WRITELOGENTRIESPARTIALERRORS.fields_by_name[ - "log_entry_errors" -].message_type = _WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY -_LISTLOGENTRIESRESPONSE.fields_by_name[ - "entries" -].message_type = ( - google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2._LOGENTRY -) -_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE.fields_by_name[ - "resource_descriptors" -].message_type = ( - google_dot_api_dot_monitored__resource__pb2._MONITOREDRESOURCEDESCRIPTOR -) -DESCRIPTOR.message_types_by_name["DeleteLogRequest"] = _DELETELOGREQUEST -DESCRIPTOR.message_types_by_name["WriteLogEntriesRequest"] = _WRITELOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name["WriteLogEntriesResponse"] = _WRITELOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name[ - "WriteLogEntriesPartialErrors" -] = _WRITELOGENTRIESPARTIALERRORS 
-DESCRIPTOR.message_types_by_name["ListLogEntriesRequest"] = _LISTLOGENTRIESREQUEST -DESCRIPTOR.message_types_by_name["ListLogEntriesResponse"] = _LISTLOGENTRIESRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsRequest" -] = _LISTMONITOREDRESOURCEDESCRIPTORSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListMonitoredResourceDescriptorsResponse" -] = _LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE -DESCRIPTOR.message_types_by_name["ListLogsRequest"] = _LISTLOGSREQUEST -DESCRIPTOR.message_types_by_name["ListLogsResponse"] = _LISTLOGSRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -DeleteLogRequest = _reflection.GeneratedProtocolMessageType( - "DeleteLogRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETELOGREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to DeleteLog. - - - Attributes: - log_name: - Required. The resource name of the log to delete: :: - "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. For example, ``"projects/my-project- - id/logs/syslog"``, ``"organizations/1234567890/logs/cloudresou - rcemanager.googleapis.com%2Factivity"``. For more information - about log names, see [LogEntry][google.logging.v2.LogEntry]. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.DeleteLogRequest) - ), -) -_sym_db.RegisterMessage(DeleteLogRequest) - -WriteLogEntriesRequest = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesRequest", - (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESREQUEST_LABELSENTRY, - __module__="google.cloud.logging_v2.proto.logging_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest.LabelsEntry) - ), - ), - DESCRIPTOR=_WRITELOGENTRIESREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to WriteLogEntries. - - - Attributes: - log_name: - Optional. A default log resource name that is assigned to all - log entries in ``entries`` that do not specify a value for - ``log_name``: :: "projects/[PROJECT_ID]/logs/[LOG_ID]" - "organizations/[ORGANIZATION_ID]/logs/[LOG_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]" - "folders/[FOLDER_ID]/logs/[LOG_ID]" ``[LOG_ID]`` must be URL- - encoded. For example: :: "projects/my-project- - id/logs/syslog" "organizations/1234567890/logs/cloudresour - cemanager.googleapis.com%2Factivity" The permission - logging.logEntries.create is needed on each project, - organization, billing account, or folder that is receiving new - log entries, whether the resource is specified in logName or - in an individual log entry. - resource: - Optional. A default monitored resource object that is assigned - to all log entries in ``entries`` that do not specify a value - for ``resource``. Example: :: { "type": "gce_instance", - "labels": { "zone": "us-central1-a", "instance_id": - "00000000000000000000" }} See - [LogEntry][google.logging.v2.LogEntry]. - labels: - Optional. Default labels that are added to the ``labels`` - field of all log entries in ``entries``. 
If a log entry - already has a label with the same key as a label in this - parameter, then the log entry's label is not changed. See - [LogEntry][google.logging.v2.LogEntry]. - entries: - Required. The log entries to send to Logging. The order of log - entries in this list does not matter. Values supplied in this - method's ``log_name``, ``resource``, and ``labels`` fields are - copied into those log entries in this list that do not include - values for their corresponding fields. For more information, - see the [LogEntry][google.logging.v2.LogEntry] type. If the - ``timestamp`` or ``insert_id`` fields are missing in log - entries, then this method supplies the current time or a - unique identifier, respectively. The supplied values are - chosen so that, among the log entries that did not supply - their own values, the entries earlier in the list will sort - before the entries later in the list. See the ``entries.list`` - method. Log entries with timestamps that are more than the - `logs retention period `__ in the past - or more than 24 hours in the future will not be available when - calling ``entries.list``. However, those log entries can still - be `exported with LogSinks `__. To improve throughput and to avoid exceeding the - `quota limit `__ for calls to - ``entries.write``, you should try to include several log - entries in this list, rather than calling this method for each - individual log entry. - partial_success: - Optional. Whether valid entries should be written even if some - other entries fail due to INVALID\_ARGUMENT or - PERMISSION\_DENIED errors. If any entry is not written, then - the response status is the error associated with one of the - failed entries and the response includes error details keyed - by the entries' zero-based index in the ``entries.write`` - method. - dry_run: - Optional. If true, the request should expect normal response, - but the entries won't be persisted nor exported. 
Useful for - checking whether the logging API endpoints are working - properly before sending valuable data. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesRequest) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesRequest) -_sym_db.RegisterMessage(WriteLogEntriesRequest.LabelsEntry) - -WriteLogEntriesResponse = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from WriteLogEntries. empty - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesResponse) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesResponse) - -WriteLogEntriesPartialErrors = _reflection.GeneratedProtocolMessageType( - "WriteLogEntriesPartialErrors", - (_message.Message,), - dict( - LogEntryErrorsEntry=_reflection.GeneratedProtocolMessageType( - "LogEntryErrorsEntry", - (_message.Message,), - dict( - DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY, - __module__="google.cloud.logging_v2.proto.logging_pb2" - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - ), - ), - DESCRIPTOR=_WRITELOGENTRIESPARTIALERRORS, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Error details for WriteLogEntries with partial success. - - - Attributes: - log_entry_errors: - When ``WriteLogEntriesRequest.partial_success`` is true, - records the error status for entries that were not written due - to a permanent error, keyed by the entry's zero-based index in - ``WriteLogEntriesRequest.entries``. Failed requests for which - no entries are written will not include per-entry errors. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.WriteLogEntriesPartialErrors) - ), -) -_sym_db.RegisterMessage(WriteLogEntriesPartialErrors) -_sym_db.RegisterMessage(WriteLogEntriesPartialErrors.LogEntryErrorsEntry) - -ListLogEntriesRequest = _reflection.GeneratedProtocolMessageType( - "ListLogEntriesRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGENTRIESREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ``ListLogEntries``. - - - Attributes: - project_ids: - Deprecated. Use ``resource_names`` instead. One or more - project identifiers or project numbers from which to retrieve - log entries. Example: ``"my-project-1A"``. - resource_names: - Required. Names of one or more parent resources from which to - retrieve log entries: :: "projects/[PROJECT_ID]" - "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" Projects listed in the ``project_ids`` - field are added to this list. - filter: - Optional. A filter that chooses which log entries to return. - See `Advanced Logs Filters - `__. Only log entries - that match the filter are returned. An empty filter matches - all log entries in the resources listed in ``resource_names``. - Referencing a parent resource that is not listed in - ``resource_names`` will cause the filter to return no results. - The maximum length of the filter is 20000 characters. - order_by: - Optional. How the results should be sorted. Presently, the - only permitted values are ``"timestamp asc"`` (default) and - ``"timestamp desc"``. The first option returns entries in - order of increasing values of ``LogEntry.timestamp`` (oldest - first), and the second option returns entries in order of - decreasing timestamps (newest first). Entries with equal - timestamps are returned in order of their ``insert_id`` - values. - page_size: - Optional. The maximum number of results to return from this - request. 
Non-positive values are ignored. The presence of - ``next_page_token`` in the response indicates that more - results might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``page_token`` must be - the value of ``next_page_token`` from the previous response. - The values of other method parameters should be identical to - those in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesRequest) - ), -) -_sym_db.RegisterMessage(ListLogEntriesRequest) - -ListLogEntriesResponse = _reflection.GeneratedProtocolMessageType( - "ListLogEntriesResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGENTRIESRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ``ListLogEntries``. - - - Attributes: - entries: - A list of log entries. If ``entries`` is empty, - ``nextPageToken`` may still be returned, indicating that more - entries may exist. See ``nextPageToken`` for more information. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. If a value for - ``next_page_token`` appears and the ``entries`` field is - empty, it means that the search found no log entries so far - but it did not have time to search all the possible log - entries. Retry the method with this value for ``page_token`` - to continue the search. Alternatively, consider speeding up - the search by changing your filter to specify a single log - name or resource type, or to narrow the time range of the - search. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogEntriesResponse) - ), -) -_sym_db.RegisterMessage(ListLogEntriesResponse) - -ListMonitoredResourceDescriptorsRequest = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ListMonitoredResourceDescriptors - - - Attributes: - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsRequest) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsRequest) - -ListMonitoredResourceDescriptorsResponse = _reflection.GeneratedProtocolMessageType( - "ListMonitoredResourceDescriptorsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ListMonitoredResourceDescriptors. - - - Attributes: - resource_descriptors: - A list of resource descriptors. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListMonitoredResourceDescriptorsResponse) - ), -) -_sym_db.RegisterMessage(ListMonitoredResourceDescriptorsResponse) - -ListLogsRequest = _reflection.GeneratedProtocolMessageType( - "ListLogsRequest", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGSREQUEST, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""The parameters to ListLogs. - - - Attributes: - parent: - Required. The resource name that owns the logs: :: - "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" - "billingAccounts/[BILLING_ACCOUNT_ID]" - "folders/[FOLDER_ID]" - page_size: - Optional. The maximum number of results to return from this - request. Non-positive values are ignored. The presence of - ``nextPageToken`` in the response indicates that more results - might be available. - page_token: - Optional. If present, then retrieve the next batch of results - from the preceding call to this method. ``pageToken`` must be - the value of ``nextPageToken`` from the previous response. The - values of other method parameters should be identical to those - in the previous call. - """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsRequest) - ), -) -_sym_db.RegisterMessage(ListLogsRequest) - -ListLogsResponse = _reflection.GeneratedProtocolMessageType( - "ListLogsResponse", - (_message.Message,), - dict( - DESCRIPTOR=_LISTLOGSRESPONSE, - __module__="google.cloud.logging_v2.proto.logging_pb2", - __doc__="""Result returned from ListLogs. - - - Attributes: - log_names: - A list of log names. For example, ``"projects/my- - project/logs/syslog"`` or ``"organizations/123/logs/cloudresou - rcemanager.googleapis.com%2Factivity"``. - next_page_token: - If there might be more results than those appearing in this - response, then ``nextPageToken`` is included. To get the next - set of results, call this method again using the value of - ``nextPageToken`` as ``pageToken``. 
- """, - # @@protoc_insertion_point(class_scope:google.logging.v2.ListLogsResponse) - ), -) -_sym_db.RegisterMessage(ListLogsResponse) - - -DESCRIPTOR._options = None -_WRITELOGENTRIESREQUEST_LABELSENTRY._options = None -_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY._options = None -_LISTLOGENTRIESREQUEST.fields_by_name["project_ids"]._options = None - -_LOGGINGSERVICEV2 = _descriptor.ServiceDescriptor( - name="LoggingServiceV2", - full_name="google.logging.v2.LoggingServiceV2", - file=DESCRIPTOR, - index=0, - serialized_options=_b( - "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" - ), - serialized_start=1551, - serialized_end=2836, - methods=[ - _descriptor.MethodDescriptor( - name="DeleteLog", - full_name="google.logging.v2.LoggingServiceV2.DeleteLog", - index=0, - containing_service=None, - input_type=_DELETELOGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" - ), - ), - _descriptor.MethodDescriptor( - name="WriteLogEntries", - full_name="google.logging.v2.LoggingServiceV2.WriteLogEntries", - index=1, - containing_service=None, - input_type=_WRITELOGENTRIESREQUEST, - output_type=_WRITELOGENTRIESRESPONSE, - serialized_options=_b( - '\202\323\344\223\002\026"\021/v2/entries:write:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="ListLogEntries", - full_name="google.logging.v2.LoggingServiceV2.ListLogEntries", - index=2, - containing_service=None, - input_type=_LISTLOGENTRIESREQUEST, - output_type=_LISTLOGENTRIESRESPONSE, - serialized_options=_b( - 
'\202\323\344\223\002\025"\020/v2/entries:list:\001*' - ), - ), - _descriptor.MethodDescriptor( - name="ListMonitoredResourceDescriptors", - full_name="google.logging.v2.LoggingServiceV2.ListMonitoredResourceDescriptors", - index=3, - containing_service=None, - input_type=_LISTMONITOREDRESOURCEDESCRIPTORSREQUEST, - output_type=_LISTMONITOREDRESOURCEDESCRIPTORSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002"\022 /v2/monitoredResourceDescriptors' - ), - ), - _descriptor.MethodDescriptor( - name="ListLogs", - full_name="google.logging.v2.LoggingServiceV2.ListLogs", - index=4, - containing_service=None, - input_type=_LISTLOGSREQUEST, - output_type=_LISTLOGSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002\242\001\022\025/v2/{parent=*/*}/logsZ\036\022\034/v2/{parent=projects/*}/logsZ#\022!/v2/{parent=organizations/*}/logsZ\035\022\033/v2/{parent=folders/*}/logsZ%\022#/v2/{parent=billingAccounts/*}/logs" - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_LOGGINGSERVICEV2) - -DESCRIPTOR.services_by_name["LoggingServiceV2"] = _LOGGINGSERVICEV2 - -# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py deleted file mode 100644 index 2a2b3656925c..000000000000 --- a/logging/google/cloud/logging_v2/proto/cloud/logging_v2/proto/logging_pb2_grpc.py +++ /dev/null @@ -1,130 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.logging_v2.proto import ( - logging_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class LoggingServiceV2Stub(object): - """Service for ingesting and querying logs. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.DeleteLog = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/DeleteLog", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.WriteLogEntries = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/WriteLogEntries", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.FromString, - ) - self.ListLogEntries = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListLogEntries", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.FromString, - ) - self.ListMonitoredResourceDescriptors = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListMonitoredResourceDescriptors", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.FromString, - ) - self.ListLogs = channel.unary_unary( - "/google.logging.v2.LoggingServiceV2/ListLogs", - request_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.FromString, - ) - - -class LoggingServiceV2Servicer(object): - """Service for ingesting and querying logs. - """ - - def DeleteLog(self, request, context): - """Deletes all the log entries in a log. - The log reappears if it receives new entries. 
- Log entries written shortly before the delete operation might not be - deleted. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def WriteLogEntries(self, request, context): - """Writes log entries to Logging. This API method is the - only way to send log entries to Logging. This method - is used, directly or indirectly, by the Logging agent - (fluentd) and all logging libraries configured to use Logging. - A single request may contain log entries for a maximum of 1000 - different resources (projects, organizations, billing accounts or - folders) - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListLogEntries(self, request, context): - """Lists log entries. Use this method to retrieve log entries that originated - from a project/folder/organization/billing account. For ways to export log - entries, see [Exporting Logs](/logging/docs/export). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListMonitoredResourceDescriptors(self, request, context): - """Lists the descriptors for monitored resource types used by Logging. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListLogs(self, request, context): - """Lists the logs in projects, organizations, folders, or billing accounts. - Only logs that have entries are listed. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_LoggingServiceV2Servicer_to_server(servicer, server): - rpc_method_handlers = { - "DeleteLog": grpc.unary_unary_rpc_method_handler( - servicer.DeleteLog, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.DeleteLogRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "WriteLogEntries": grpc.unary_unary_rpc_method_handler( - servicer.WriteLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.WriteLogEntriesResponse.SerializeToString, - ), - "ListLogEntries": grpc.unary_unary_rpc_method_handler( - servicer.ListLogEntries, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogEntriesResponse.SerializeToString, - ), - "ListMonitoredResourceDescriptors": grpc.unary_unary_rpc_method_handler( - servicer.ListMonitoredResourceDescriptors, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListMonitoredResourceDescriptorsResponse.SerializeToString, - ), - "ListLogs": grpc.unary_unary_rpc_method_handler( - servicer.ListLogs, - request_deserializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsRequest.FromString, - response_serializer=google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__pb2.ListLogsResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.logging.v2.LoggingServiceV2", 
rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/logging/google/cloud/logging_v2/proto/http_request.proto b/logging/google/cloud/logging_v2/proto/http_request.proto deleted file mode 100644 index 21b1367ab8ca..000000000000 --- a/logging/google/cloud/logging_v2/proto/http_request.proto +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2018 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.logging.type; - -import "google/api/annotations.proto"; -import "google/protobuf/duration.proto"; - -option csharp_namespace = "Google.Cloud.Logging.Type"; -option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; -option java_multiple_files = true; -option java_outer_classname = "HttpRequestProto"; -option java_package = "com.google.logging.type"; -option php_namespace = "Google\\Cloud\\Logging\\Type"; - - -// A common proto for logging HTTP requests. Only contains semantics -// defined by the HTTP specification. Product-specific logging -// information MUST be defined in a separate message. -message HttpRequest { - // The request method. Examples: `"GET"`, `"HEAD"`, `"PUT"`, `"POST"`. - string request_method = 1; - - // The scheme (http, https), the host name, the path and the query - // portion of the URL that was requested. - // Example: `"http://example.com/some/info?color=red"`. 
- string request_url = 2; - - // The size of the HTTP request message in bytes, including the request - // headers and the request body. - int64 request_size = 3; - - // The response code indicating the status of response. - // Examples: 200, 404. - int32 status = 4; - - // The size of the HTTP response message sent back to the client, in bytes, - // including the response headers and the response body. - int64 response_size = 5; - - // The user agent sent by the client. Example: - // `"Mozilla/4.0 (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR 1.0.3705)"`. - string user_agent = 6; - - // The IP address (IPv4 or IPv6) of the client that issued the HTTP - // request. Examples: `"192.168.1.1"`, `"FE80::0202:B3FF:FE1E:8329"`. - string remote_ip = 7; - - // The IP address (IPv4 or IPv6) of the origin server that the request was - // sent to. - string server_ip = 13; - - // The referer URL of the request, as defined in - // [HTTP/1.1 Header Field Definitions](http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html). - string referer = 8; - - // The request processing latency on the server, from the time the request was - // received until the response was sent. - google.protobuf.Duration latency = 14; - - // Whether or not a cache lookup was attempted. - bool cache_lookup = 11; - - // Whether or not an entity was served from cache - // (with or without validation). - bool cache_hit = 9; - - // Whether or not the response was validated with the origin server before - // being served from cache. This field is only meaningful if `cache_hit` is - // True. - bool cache_validated_with_origin_server = 10; - - // The number of HTTP response bytes inserted into cache. Set only when a - // cache fill was attempted. - int64 cache_fill_bytes = 12; - - // Protocol used for the request. 
Examples: "HTTP/1.1", "HTTP/2", "websocket" - string protocol = 15; -} diff --git a/logging/google/cloud/logging_v2/proto/log_entry_pb2.py b/logging/google/cloud/logging_v2/proto/log_entry_pb2.py index 1d3af3c42416..c2517d84adae 100644 --- a/logging/google/cloud/logging_v2/proto/log_entry_pb2.py +++ b/logging/google/cloud/logging_v2/proto/log_entry_pb2.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/logging_v2/proto/log_entry.proto @@ -17,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import ( monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, ) @@ -30,6 +27,8 @@ from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -40,16 +39,17 @@ "\n\025com.google.logging.v2B\rLogEntryProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x8a\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 
\x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12\x37\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadata\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 \x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + 
'\n-google/cloud/logging_v2/proto/log_entry.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a&google/logging/type/http_request.proto\x1a&google/logging/type/log_severity.proto\x1a\x19google/protobuf/any.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\x8e\x06\n\x08LogEntry\x12\x10\n\x08log_name\x18\x0c \x01(\t\x12/\n\x08resource\x18\x08 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12-\n\rproto_payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x12\x16\n\x0ctext_payload\x18\x03 \x01(\tH\x00\x12/\n\x0cjson_payload\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12-\n\ttimestamp\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11receive_timestamp\x18\x18 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\x08severity\x18\n \x01(\x0e\x32 .google.logging.type.LogSeverity\x12\x11\n\tinsert_id\x18\x04 \x01(\t\x12\x36\n\x0chttp_request\x18\x07 \x01(\x0b\x32 .google.logging.type.HttpRequest\x12\x37\n\x06labels\x18\x0b \x03(\x0b\x32\'.google.logging.v2.LogEntry.LabelsEntry\x12;\n\x08metadata\x18\x19 \x01(\x0b\x32%.google.api.MonitoredResourceMetadataB\x02\x18\x01\x12\x37\n\toperation\x18\x0f \x01(\x0b\x32$.google.logging.v2.LogEntryOperation\x12\r\n\x05trace\x18\x16 \x01(\t\x12\x0f\n\x07span_id\x18\x1b \x01(\t\x12\x15\n\rtrace_sampled\x18\x1e \x01(\x08\x12\x42\n\x0fsource_location\x18\x17 \x01(\x0b\x32).google.logging.v2.LogEntrySourceLocation\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\t\n\x07payload"N\n\x11LogEntryOperation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08producer\x18\x02 \x01(\t\x12\r\n\x05\x66irst\x18\x03 \x01(\x08\x12\x0c\n\x04last\x18\x04 \x01(\x08"F\n\x16LogEntrySourceLocation\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x10\n\x08\x66unction\x18\x03 
\x01(\tB\x99\x01\n\x15\x63om.google.logging.v2B\rLogEntryProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, google_dot_logging_dot_type_dot_http__request__pb2.DESCRIPTOR, google_dot_logging_dot_type_dot_log__severity__pb2.DESCRIPTOR, google_dot_protobuf_dot_any__pb2.DESCRIPTOR, google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -106,8 +106,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1028, - serialized_end=1073, + serialized_start=1057, + serialized_end=1102, ) _LOGENTRY = _descriptor.Descriptor( @@ -330,7 +330,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\030\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -425,7 +425,7 @@ ), ], extensions=[], - nested_types=[_LOGENTRY_LABELSENTRY], + nested_types=[_LOGENTRY_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -438,10 +438,10 @@ index=0, containing_type=None, fields=[], - ) + ), ], - serialized_start=306, - serialized_end=1084, + serialized_start=331, + serialized_end=1113, ) @@ -533,8 +533,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1086, - serialized_end=1164, + serialized_start=1115, + serialized_end=1193, ) @@ -608,8 +608,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1166, - serialized_end=1236, + serialized_start=1195, + serialized_end=1265, ) _LOGENTRY_LABELSENTRY.containing_type = _LOGENTRY @@ -704,17 +704,20 @@ for a log name with a leading slash will never return any results. resource: - Required. 
The primary monitored resource associated with this - log entry. Example: a log entry that reports a database error - would be associated with the monitored resource designating - the particular database that reported the error. + Required. The monitored resource that produced this log entry. + Example: a log entry that reports a database error would be + associated with the monitored resource designating the + particular database that reported the error. payload: Optional. The log entry payload, which can be one of multiple types. proto_payload: The log entry payload, represented as a protocol buffer. Some Google Cloud Platform services use this field for their log - entry payloads. + entry payloads. The following protocol buffer types are + supported; user-defined types are not supported: + "type.googleapis.com/google.cloud.audit.AuditLog" + "type.googleapis.com/google.appengine.logging.v1.RequestLog" text_payload: The log entry payload, represented as a Unicode string (UTF-8). @@ -733,7 +736,7 @@ `__ in the past, and no more than 24 hours in the future. Log entries outside those time boundaries will not be available when calling ``entries.list``, but those log - entries can still be exported with `LogSinks + entries can still be `exported with LogSinks `__. receive_timestamp: Output only. The time the log entry was received by Logging. @@ -755,9 +758,17 @@ Optional. A set of user-defined (key, value) data that provides additional information about the log entry. metadata: - Output only. Additional metadata about the monitored resource. - Only ``k8s_container``, ``k8s_pod``, and ``k8s_node`` - MonitoredResources have this field populated. + Deprecated. Output only. Additional metadata about the + monitored resource. Only ``k8s_container``, ``k8s_pod``, and + ``k8s_node`` MonitoredResources have this field populated for + GKE versions older than 1.12.6. For GKE versions 1.12.6 and + above, the ``metadata`` field has been deprecated. 
The + Kubernetes pod labels that used to be in + ``metadata.userLabels`` will now be present in the ``labels`` + field with a key prefix of ``k8s-pod/``. The Stackdriver + system labels that were present in the + ``metadata.systemLabels`` field will no longer be available in + the LogEntry. operation: Optional. Information about an operation associated with the log entry, if applicable. @@ -769,12 +780,12 @@ projectid/traces/06796866738c859f2f19b7cfb3214824`` span_id: Optional. The span ID within the trace associated with the log - entry. For Trace spans, this is the same format that the Trace - API v2 uses: a 16-character hexadecimal encoding of an 8-byte - array, such as "000000000000004a". + entry. For Trace spans, this is the same format that the + Trace API v2 uses: a 16-character hexadecimal encoding of an + 8-byte array, such as "000000000000004a". trace_sampled: Optional. The sampling decision of the trace associated with - the log entry. True means that the trace resource name in the + the log entry. True means that the trace resource name in the ``trace`` field was sampled for storage in a trace backend. False means that the trace was not sampled for storage when this log entry was written, or the sampling decision was @@ -858,4 +869,5 @@ DESCRIPTOR._options = None _LOGENTRY_LABELSENTRY._options = None +_LOGENTRY.fields_by_name["metadata"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/log_severity.proto b/logging/google/cloud/logging_v2/proto/log_severity.proto deleted file mode 100644 index ccb08cacb445..000000000000 --- a/logging/google/cloud/logging_v2/proto/log_severity.proto +++ /dev/null @@ -1,73 +0,0 @@ -// Copyright 2018 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.logging.type; - -import "google/api/annotations.proto"; - -option csharp_namespace = "Google.Cloud.Logging.Type"; -option go_package = "google.golang.org/genproto/googleapis/logging/type;ltype"; -option java_multiple_files = true; -option java_outer_classname = "LogSeverityProto"; -option java_package = "com.google.logging.type"; -option php_namespace = "Google\\Cloud\\Logging\\Type"; - - -// The severity of the event described in a log entry, expressed as one of the -// standard severity levels listed below. For your reference, the levels are -// assigned the listed numeric values. The effect of using numeric values other -// than those listed is undefined. -// -// You can filter for log entries by severity. For example, the following -// filter expression will match log entries with severities `INFO`, `NOTICE`, -// and `WARNING`: -// -// severity > DEBUG AND severity <= WARNING -// -// If you are writing log entries, you should map other severity encodings to -// one of these standard levels. For example, you might map all of Java's FINE, -// FINER, and FINEST levels to `LogSeverity.DEBUG`. You can preserve the -// original severity level in the log entry payload if you wish. -enum LogSeverity { - // (0) The log entry has no assigned severity level. - DEFAULT = 0; - - // (100) Debug or trace information. - DEBUG = 100; - - // (200) Routine information, such as ongoing status or performance. 
- INFO = 200; - - // (300) Normal but significant events, such as start up, shut down, or - // a configuration change. - NOTICE = 300; - - // (400) Warning events might cause problems. - WARNING = 400; - - // (500) Error events are likely to cause problems. - ERROR = 500; - - // (600) Critical events cause more severe problems or outages. - CRITICAL = 600; - - // (700) A person must take an action immediately. - ALERT = 700; - - // (800) One or more systems are unusable. - EMERGENCY = 800; -} diff --git a/logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py b/logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py deleted file mode 100644 index cd065d8a9311..000000000000 --- a/logging/google/cloud/logging_v2/proto/logging/type/http_request_pb2.py +++ /dev/null @@ -1,405 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/logging/type/http_request.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/logging/type/http_request.proto", - package="google.logging.type", - syntax="proto3", - serialized_options=_b( - "\n\027com.google.logging.typeB\020HttpRequestProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type" - ), - serialized_pb=_b( - 
'\n&google/logging/type/http_request.proto\x12\x13google.logging.type\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/api/annotations.proto"\xef\x02\n\x0bHttpRequest\x12\x16\n\x0erequest_method\x18\x01 \x01(\t\x12\x13\n\x0brequest_url\x18\x02 \x01(\t\x12\x14\n\x0crequest_size\x18\x03 \x01(\x03\x12\x0e\n\x06status\x18\x04 \x01(\x05\x12\x15\n\rresponse_size\x18\x05 \x01(\x03\x12\x12\n\nuser_agent\x18\x06 \x01(\t\x12\x11\n\tremote_ip\x18\x07 \x01(\t\x12\x11\n\tserver_ip\x18\r \x01(\t\x12\x0f\n\x07referer\x18\x08 \x01(\t\x12*\n\x07latency\x18\x0e \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x14\n\x0c\x63\x61\x63he_lookup\x18\x0b \x01(\x08\x12\x11\n\tcache_hit\x18\t \x01(\x08\x12*\n"cache_validated_with_origin_server\x18\n \x01(\x08\x12\x18\n\x10\x63\x61\x63he_fill_bytes\x18\x0c \x01(\x03\x12\x10\n\x08protocol\x18\x0f \x01(\tB\x9f\x01\n\x17\x63om.google.logging.typeB\x10HttpRequestProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Typeb\x06proto3' - ), - dependencies=[ - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - ], -) - - -_HTTPREQUEST = _descriptor.Descriptor( - name="HttpRequest", - full_name="google.logging.type.HttpRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="request_method", - full_name="google.logging.type.HttpRequest.request_method", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_url", - full_name="google.logging.type.HttpRequest.request_url", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="request_size", - full_name="google.logging.type.HttpRequest.request_size", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="status", - full_name="google.logging.type.HttpRequest.status", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="response_size", - full_name="google.logging.type.HttpRequest.response_size", - index=4, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="user_agent", - full_name="google.logging.type.HttpRequest.user_agent", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="remote_ip", - full_name="google.logging.type.HttpRequest.remote_ip", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="server_ip", - full_name="google.logging.type.HttpRequest.server_ip", - index=7, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="referer", - full_name="google.logging.type.HttpRequest.referer", - index=8, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="latency", - full_name="google.logging.type.HttpRequest.latency", - index=9, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cache_lookup", - full_name="google.logging.type.HttpRequest.cache_lookup", - index=10, - number=11, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cache_hit", - full_name="google.logging.type.HttpRequest.cache_hit", - index=11, - number=9, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cache_validated_with_origin_server", - full_name="google.logging.type.HttpRequest.cache_validated_with_origin_server", - index=12, - number=10, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cache_fill_bytes", - full_name="google.logging.type.HttpRequest.cache_fill_bytes", - index=13, - number=12, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="protocol", - full_name="google.logging.type.HttpRequest.protocol", - index=14, - number=15, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=126, - serialized_end=493, -) - -_HTTPREQUEST.fields_by_name[ - "latency" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -DESCRIPTOR.message_types_by_name["HttpRequest"] = _HTTPREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -HttpRequest = _reflection.GeneratedProtocolMessageType( - "HttpRequest", - (_message.Message,), - dict( - DESCRIPTOR=_HTTPREQUEST, - __module__="google.logging.type.http_request_pb2", - __doc__="""A common proto for logging HTTP requests. Only contains semantics - defined by the HTTP specification. 
Product-specific logging information - MUST be defined in a separate message. - - - Attributes: - request_method: - The request method. Examples: ``"GET"``, ``"HEAD"``, - ``"PUT"``, ``"POST"``. - request_url: - The scheme (http, https), the host name, the path and the - query portion of the URL that was requested. Example: - ``"http://example.com/some/info?color=red"``. - request_size: - The size of the HTTP request message in bytes, including the - request headers and the request body. - status: - The response code indicating the status of response. Examples: - 200, 404. - response_size: - The size of the HTTP response message sent back to the client, - in bytes, including the response headers and the response - body. - user_agent: - The user agent sent by the client. Example: ``"Mozilla/4.0 - (compatible; MSIE 6.0; Windows 98; Q312461; .NET CLR - 1.0.3705)"``. - remote_ip: - The IP address (IPv4 or IPv6) of the client that issued the - HTTP request. Examples: ``"192.168.1.1"``, - ``"FE80::0202:B3FF:FE1E:8329"``. - server_ip: - The IP address (IPv4 or IPv6) of the origin server that the - request was sent to. - referer: - The referer URL of the request, as defined in `HTTP/1.1 Header - Field Definitions - `__. - latency: - The request processing latency on the server, from the time - the request was received until the response was sent. - cache_lookup: - Whether or not a cache lookup was attempted. - cache_hit: - Whether or not an entity was served from cache (with or - without validation). - cache_validated_with_origin_server: - Whether or not the response was validated with the origin - server before being served from cache. This field is only - meaningful if ``cache_hit`` is True. - cache_fill_bytes: - The number of HTTP response bytes inserted into cache. Set - only when a cache fill was attempted. - protocol: - Protocol used for the request. 
Examples: "HTTP/1.1", "HTTP/2", - "websocket" - """, - # @@protoc_insertion_point(class_scope:google.logging.type.HttpRequest) - ), -) -_sym_db.RegisterMessage(HttpRequest) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py b/logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py deleted file mode 100644 index bc429a3fca0b..000000000000 --- a/logging/google/cloud/logging_v2/proto/logging/type/log_severity_pb2.py +++ /dev/null @@ -1,93 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/logging/type/log_severity.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/logging/type/log_severity.proto", - package="google.logging.type", - syntax="proto3", - serialized_options=_b( - "\n\027com.google.logging.typeB\020LogSeverityProtoP\001Z8google.golang.org/genproto/googleapis/logging/type;ltype\252\002\031Google.Cloud.Logging.Type\312\002\031Google\\Cloud\\Logging\\Type" - ), - serialized_pb=_b( - 
"\n&google/logging/type/log_severity.proto\x12\x13google.logging.type\x1a\x1cgoogle/api/annotations.proto*\x82\x01\n\x0bLogSeverity\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\t\n\x05\x44\x45\x42UG\x10\x64\x12\t\n\x04INFO\x10\xc8\x01\x12\x0b\n\x06NOTICE\x10\xac\x02\x12\x0c\n\x07WARNING\x10\x90\x03\x12\n\n\x05\x45RROR\x10\xf4\x03\x12\r\n\x08\x43RITICAL\x10\xd8\x04\x12\n\n\x05\x41LERT\x10\xbc\x05\x12\x0e\n\tEMERGENCY\x10\xa0\x06\x42\x9f\x01\n\x17\x63om.google.logging.typeB\x10LogSeverityProtoP\x01Z8google.golang.org/genproto/googleapis/logging/type;ltype\xaa\x02\x19Google.Cloud.Logging.Type\xca\x02\x19Google\\Cloud\\Logging\\Typeb\x06proto3" - ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], -) - -_LOGSEVERITY = _descriptor.EnumDescriptor( - name="LogSeverity", - full_name="google.logging.type.LogSeverity", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="DEFAULT", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DEBUG", index=1, number=100, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INFO", index=2, number=200, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NOTICE", index=3, number=300, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WARNING", index=4, number=400, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=5, number=500, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CRITICAL", index=6, number=600, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALERT", index=7, number=700, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="EMERGENCY", index=8, number=800, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=94, - 
serialized_end=224, -) -_sym_db.RegisterEnumDescriptor(_LOGSEVERITY) - -LogSeverity = enum_type_wrapper.EnumTypeWrapper(_LOGSEVERITY) -DEFAULT = 0 -DEBUG = 100 -INFO = 200 -NOTICE = 300 -WARNING = 400 -ERROR = 500 -CRITICAL = 600 -ALERT = 700 -EMERGENCY = 800 - - -DESCRIPTOR.enum_types_by_name["LogSeverity"] = _LOGSEVERITY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/logging/google/cloud/logging_v2/proto/logging_config.proto b/logging/google/cloud/logging_v2/proto/logging_config.proto index 1e3c84d3f419..a9ccdf51cb19 100644 --- a/logging/google/cloud/logging_v2/proto/logging_config.proto +++ b/logging/google/cloud/logging_v2/proto/logging_config.proto @@ -374,6 +374,12 @@ message LogSink { // // This field may not be present for older sinks. google.protobuf.Timestamp update_time = 14; + + // Do not use. This field is ignored. + google.protobuf.Timestamp start_time = 10 [deprecated = true]; + + // Do not use. This field is ignored. + google.protobuf.Timestamp end_time = 11 [deprecated = true]; } // Options that change functionality of a sink exporting data to BigQuery. diff --git a/logging/google/cloud/logging_v2/proto/logging_config_pb2.py b/logging/google/cloud/logging_v2/proto/logging_config_pb2.py index abeb244880d8..cc2a143fc6fc 100644 --- a/logging/google/cloud/logging_v2/proto/logging_config_pb2.py +++ b/logging/google/cloud/logging_v2/proto/logging_config_pb2.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/logging_v2/proto/logging_config.proto @@ -17,10 +15,12 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -31,13 +31,15 @@ "\n\025com.google.logging.v2B\022LoggingConfigProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe3\x02\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 
\x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"S\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\x94\x19\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/
sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}B\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.or
g/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n2google/cloud/logging_v2/proto/logging_config.proto\x12\x11google.logging.v2\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x90\x04\n\x07LogSink\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65stination\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12K\n\x15output_version_format\x18\x06 \x01(\x0e\x32(.google.logging.v2.LogSink.VersionFormatB\x02\x18\x01\x12\x17\n\x0fwriter_identity\x18\x08 \x01(\t\x12\x18\n\x10include_children\x18\t \x01(\x08\x12>\n\x10\x62igquery_options\x18\x0c \x01(\x0b\x32".google.logging.v2.BigQueryOptionsH\x00\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x32\n\nstart_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01\x12\x30\n\x08\x65nd_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x02\x18\x01"?\n\rVersionFormat\x12\x1e\n\x1aVERSION_FORMAT_UNSPECIFIED\x10\x00\x12\x06\n\x02V2\x10\x01\x12\x06\n\x02V1\x10\x02\x42\t\n\x07options"1\n\x0f\x42igQueryOptions\x12\x1e\n\x16use_partitioned_tables\x18\x01 \x01(\x08"I\n\x10ListSinksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"W\n\x11ListSinksResponse\x12)\n\x05sinks\x18\x01 \x03(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x0eGetSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"m\n\x11\x43reateSinkRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08"\xa1\x01\n\x11UpdateSinkRequest\x12\x11\n\tsink_name\x18\x01 
\x01(\t\x12(\n\x04sink\x18\x02 \x01(\x0b\x32\x1a.google.logging.v2.LogSink\x12\x1e\n\x16unique_writer_identity\x18\x03 \x01(\x08\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x11\x44\x65leteSinkRequest\x12\x11\n\tsink_name\x18\x01 \x01(\t"\xb5\x01\n\x0cLogExclusion\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x10\n\x08\x64isabled\x18\x04 \x01(\x08\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"N\n\x15ListExclusionsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"f\n\x16ListExclusionsResponse\x12\x33\n\nexclusions\x18\x01 \x03(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13GetExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x16\x43reateExclusionRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion"\x8b\x01\n\x16UpdateExclusionRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x32\n\texclusion\x18\x02 \x01(\x0b\x32\x1f.google.logging.v2.LogExclusion\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"&\n\x16\x44\x65leteExclusionRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\xf6\x1a\n\x0f\x43onfigServiceV2\x12\x87\x02\n\tListSinks\x12#.google.logging.v2.ListSinksRequest\x1a$.google.logging.v2.ListSinksResponse"\xae\x01\x82\xd3\xe4\x93\x02\xa7\x01\x12\x16/v2/{parent=*/*}/sinksZ\x1f\x12\x1d/v2/{parent=projects/*}/sinksZ$\x12"/v2/{parent=organizations/*}/sinksZ\x1e\x12\x1c/v2/{parent=folders/*}/sinksZ&\x12$/v2/{parent=billingAccounts/*}/sinks\x12\x92\x02\n\x07GetSink\x12!.google.logging.v2.GetSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{sink_name=*/*/sinks/*}Z$\x12"/v2/{sink_name=projects/*/sinks/*}Z)\x12\'/v2/{sink_name=organizations/*/sinks/*}Z#\x12!/v2/{sink_name=folders/*/sinks/*}Z+\x12)/v2/{sink_name=billingAccounts/*/sinks/*}\x12\x9d\x02\n\nCreateSink\x12$.google.logging.v2.CreateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xcc\x01\x82\xd3\xe4\x93\x02\xc5\x01"\x16/v2/{parent=*/*}/sinks:\x04sinkZ%"\x1d/v2/{parent=projects/*}/sinks:\x04sinkZ*""/v2/{parent=organizations/*}/sinks:\x04sinkZ$"\x1c/v2/{parent=folders/*}/sinks:\x04sinkZ,"$/v2/{parent=billingAccounts/*}/sinks:\x04sink\x12\xf1\x03\n\nUpdateSink\x12$.google.logging.v2.UpdateSinkRequest\x1a\x1a.google.logging.v2.LogSink"\xa0\x03\x82\xd3\xe4\x93\x02\x99\x03\x1a\x1b/v2/{sink_name=*/*/sinks/*}:\x04sinkZ*\x1a"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/\x1a\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)\x1a!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ1\x1a)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sinkZ*2"/v2/{sink_name=projects/*/sinks/*}:\x04sinkZ/2\'/v2/{sink_name=organizations/*/sinks/*}:\x04sinkZ)2!/v2/{sink_name=folders/*/sinks/*}:\x04sinkZ12)/v2/{sink_name=billingAccounts/*/sinks/*}:\x04sink\x12\x94\x02\n\nDeleteSink\x12$.google.logging.v2.DeleteSinkRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{sink_name=*/*/sinks/*}Z$*"/v2/{sink_name=projects/*/sinks/*}Z)*\'/v2/{sink_name=organizations/*/sinks/*}Z#*!/v2/{sink_name=folders/*/sinks/*}Z+*)/v2/{sink_name=billingAccounts/*/
sinks/*}\x12\xaf\x02\n\x0eListExclusions\x12(.google.logging.v2.ListExclusionsRequest\x1a).google.logging.v2.ListExclusionsResponse"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{parent=*/*}/exclusionsZ$\x12"/v2/{parent=projects/*}/exclusionsZ)\x12\'/v2/{parent=organizations/*}/exclusionsZ#\x12!/v2/{parent=folders/*}/exclusionsZ+\x12)/v2/{parent=billingAccounts/*}/exclusions\x12\xa1\x02\n\x0cGetExclusion\x12&.google.logging.v2.GetExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01\x12\x1b/v2/{name=*/*/exclusions/*}Z$\x12"/v2/{name=projects/*/exclusions/*}Z)\x12\'/v2/{name=organizations/*/exclusions/*}Z#\x12!/v2/{name=folders/*/exclusions/*}Z+\x12)/v2/{name=billingAccounts/*/exclusions/*}\x12\xde\x02\n\x0f\x43reateExclusion\x12).google.logging.v2.CreateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01"\x1b/v2/{parent=*/*}/exclusions:\texclusionZ/""/v2/{parent=projects/*}/exclusions:\texclusionZ4"\'/v2/{parent=organizations/*}/exclusions:\texclusionZ."!/v2/{parent=folders/*}/exclusions:\texclusionZ6")/v2/{parent=billingAccounts/*}/exclusions:\texclusion\x12\xde\x02\n\x0fUpdateExclusion\x12).google.logging.v2.UpdateExclusionRequest\x1a\x1f.google.logging.v2.LogExclusion"\xfe\x01\x82\xd3\xe4\x93\x02\xf7\x01\x32\x1b/v2/{name=*/*/exclusions/*}:\texclusionZ/2"/v2/{name=projects/*/exclusions/*}:\texclusionZ42\'/v2/{name=organizations/*/exclusions/*}:\texclusionZ.2!/v2/{name=folders/*/exclusions/*}:\texclusionZ62)/v2/{name=billingAccounts/*/exclusions/*}:\texclusion\x12\x9e\x02\n\x0f\x44\x65leteExclusion\x12).google.logging.v2.DeleteExclusionRequest\x1a\x16.google.protobuf.Empty"\xc7\x01\x82\xd3\xe4\x93\x02\xc0\x01*\x1b/v2/{name=*/*/exclusions/*}Z$*"/v2/{name=projects/*/exclusions/*}Z)*\'/v2/{name=organizations/*/exclusions/*}Z#*!/v2/{name=folders/*/exclusions/*}Z+*)/v2/{name=billingAccounts/*/exclusions/*}\x1a\xdf\x01\xca\x41\x16logging.googleapis.com\xd2\x41\xc2\x01https://www.googleapis.
com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.readB\x9e\x01\n\x15\x63om.google.logging.v2B\x12LoggingConfigProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -64,8 +66,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=492, - serialized_end=555, + serialized_start=711, + serialized_end=774, ) _sym_db.RegisterEnumDescriptor(_LOGSINK_VERSIONFORMAT) @@ -185,10 +187,64 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="bigquery_options", + full_name="google.logging.v2.LogSink.bigquery_options", + index=6, + number=12, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogSink.create_time", + index=7, + number=13, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogSink.update_time", + index=8, + number=14, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="start_time", full_name="google.logging.v2.LogSink.start_time", - index=6, + index=9, number=10, type=11, cpp_type=10, @@ -206,7 +262,7 @@ _descriptor.FieldDescriptor( name="end_time", full_name="google.logging.v2.LogSink.end_time", - index=7, + index=10, number=11, type=11, cpp_type=10, @@ -224,14 +280,61 @@ ], extensions=[], nested_types=[], - enum_types=[_LOGSINK_VERSIONFORMAT], + enum_types=[_LOGSINK_VERSIONFORMAT,], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="options", + full_name="google.logging.v2.LogSink.options", + index=0, + containing_type=None, + fields=[], + ), + ], + serialized_start=257, + serialized_end=785, +) + + +_BIGQUERYOPTIONS = _descriptor.Descriptor( + name="BigQueryOptions", + full_name="google.logging.v2.BigQueryOptions", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="use_partitioned_tables", + full_name="google.logging.v2.BigQueryOptions.use_partitioned_tables", + index=0, + number=1, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=200, - serialized_end=555, + serialized_start=787, + serialized_end=836, ) @@ -305,8 +408,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=557, - serialized_end=630, + serialized_start=838, + serialized_end=911, ) @@ -362,8 +465,8 
@@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=632, - serialized_end=719, + serialized_start=913, + serialized_end=1000, ) @@ -391,7 +494,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -401,8 +504,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=721, - serialized_end=756, + serialized_start=1002, + serialized_end=1037, ) @@ -476,8 +579,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=758, - serialized_end=867, + serialized_start=1039, + serialized_end=1148, ) @@ -569,8 +672,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=870, - serialized_end=1031, + serialized_start=1151, + serialized_end=1312, ) @@ -598,7 +701,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -608,8 +711,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1033, - serialized_end=1071, + serialized_start=1314, + serialized_end=1352, ) @@ -692,6 +795,42 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogExclusion.create_time", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogExclusion.update_time", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -701,8 +840,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=1073, - serialized_end=1156, + serialized_start=1355, + serialized_end=1536, ) @@ -776,8 +915,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1158, - serialized_end=1236, + serialized_start=1538, + serialized_end=1616, ) @@ -833,8 +972,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1238, - serialized_end=1340, + serialized_start=1618, + serialized_end=1720, ) @@ -862,7 +1001,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -872,8 +1011,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1342, - serialized_end=1377, + serialized_start=1722, + serialized_end=1757, ) @@ -929,8 +1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1379, - serialized_end=1471, + serialized_start=1759, + serialized_end=1851, ) @@ -1004,8 +1143,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1474, - serialized_end=1613, + serialized_start=1854, + serialized_end=1993, ) @@ -1033,7 +1172,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -1043,11 +1182,18 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1615, - serialized_end=1653, + serialized_start=1995, + serialized_end=2033, ) _LOGSINK.fields_by_name["output_version_format"].enum_type = _LOGSINK_VERSIONFORMAT +_LOGSINK.fields_by_name["bigquery_options"].message_type = _BIGQUERYOPTIONS +_LOGSINK.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGSINK.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGSINK.fields_by_name[ "start_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -1055,12 +1201,24 @@ "end_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGSINK_VERSIONFORMAT.containing_type = 
_LOGSINK +_LOGSINK.oneofs_by_name["options"].fields.append( + _LOGSINK.fields_by_name["bigquery_options"] +) +_LOGSINK.fields_by_name["bigquery_options"].containing_oneof = _LOGSINK.oneofs_by_name[ + "options" +] _LISTSINKSRESPONSE.fields_by_name["sinks"].message_type = _LOGSINK _CREATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK _UPDATESINKREQUEST.fields_by_name["sink"].message_type = _LOGSINK _UPDATESINKREQUEST.fields_by_name[ "update_mask" ].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LOGEXCLUSION.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGEXCLUSION.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LISTEXCLUSIONSRESPONSE.fields_by_name["exclusions"].message_type = _LOGEXCLUSION _CREATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION _UPDATEEXCLUSIONREQUEST.fields_by_name["exclusion"].message_type = _LOGEXCLUSION @@ -1068,6 +1226,7 @@ "update_mask" ].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK DESCRIPTOR.message_types_by_name["LogSink"] = _LOGSINK +DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS DESCRIPTOR.message_types_by_name["ListSinksRequest"] = _LISTSINKSREQUEST DESCRIPTOR.message_types_by_name["ListSinksResponse"] = _LISTSINKSRESPONSE DESCRIPTOR.message_types_by_name["GetSinkRequest"] = _GETSINKREQUEST @@ -1111,10 +1270,10 @@ sink's ``writer_identity``, set when the sink is created, must have permission to write to the destination or else the log entries are not exported. For more information, see `Exporting - Logs With Sinks `__. + Logs with Sinks `__. filter: Optional. An `advanced logs filter - `__. The only exported + `__. The only exported log entries are those that are in the resource owning the sink and that match the filter. 
For example: :: logName="projects/[PROJECT_ID]/logs/[LOG_ID]" AND @@ -1126,17 +1285,18 @@ writer_identity: Output only. An IAM identity—a service account or group—under which Logging writes the exported log entries to the sink's - destination. This field is set by `sinks.create - `__ - and `sinks.update `__, based on the setting of - ``unique_writer_identity`` in those methods. Until you grant - this identity write-access to the destination, log entry - exports from this sink will fail. For more information, see - `Granting access for a resource `__. Consult the destination service's documentation to - determine the appropriate IAM roles to assign to the identity. + destination. This field is set by + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] + and + [sinks.update][google.logging.v2.ConfigServiceV2.UpdateSink] + based on the value of ``unique_writer_identity`` in those + methods. Until you grant this identity write-access to the + destination, log entry exports from this sink will fail. For + more information, see `Granting Access for a Resource + `__. Consult the + destination service's documentation to determine the + appropriate IAM roles to assign to the identity. include_children: Optional. This field applies only to sinks owned by organizations and folders. If the field is false, the default, @@ -1152,18 +1312,52 @@ certain child projects, filter on the project part of the log name: :: logName:("projects/test-project1/" OR "projects/test-project2/") AND resource.type=gce_instance + options: + Optional. Destination dependent options. + bigquery_options: + Optional. Options that affect sinks exporting data to + BigQuery. + create_time: + Output only. The creation timestamp of the sink. This field + may not be present for older sinks. + update_time: + Output only. The last update timestamp of the sink. This + field may not be present for older sinks. start_time: - Deprecated. This field is ignored when creating or updating - sinks. 
+ Do not use. This field is ignored. end_time: - Deprecated. This field is ignored when creating or updating - sinks. + Do not use. This field is ignored. """, # @@protoc_insertion_point(class_scope:google.logging.v2.LogSink) ), ) _sym_db.RegisterMessage(LogSink) +BigQueryOptions = _reflection.GeneratedProtocolMessageType( + "BigQueryOptions", + (_message.Message,), + dict( + DESCRIPTOR=_BIGQUERYOPTIONS, + __module__="google.cloud.logging_v2.proto.logging_config_pb2", + __doc__="""Options that change functionality of a sink exporting data to BigQuery. + + + Attributes: + use_partitioned_tables: + Optional. Whether to use `BigQuery's partition tables + `__. By default, Logging + creates dated tables based on the log entries' timestamps, + e.g. syslog\_20170523. With partitioned tables the date suffix + is no longer present and `special query syntax + `__ has to be used + instead. In both cases, tables are sharded based on UTC + timezone. + """, + # @@protoc_insertion_point(class_scope:google.logging.v2.BigQueryOptions) + ), +) +_sym_db.RegisterMessage(BigQueryOptions) + ListSinksRequest = _reflection.GeneratedProtocolMessageType( "ListSinksRequest", (_message.Message,), @@ -1303,8 +1497,8 @@ Required. The updated sink, whose name is the same identifier that appears as part of ``sink_name``. unique_writer_identity: - Optional. See `sinks.create - `__ + Optional. See + [sinks.create][google.logging.v2.ConfigServiceV2.CreateSink] for a description of this field. When updating a sink, the effect of this field on the value of ``writer_identity`` in the updated sink depends on both the old and new values of @@ -1365,11 +1559,11 @@ DESCRIPTOR=_LOGEXCLUSION, __module__="google.cloud.logging_v2.proto.logging_config_pb2", __doc__="""Specifies a set of log entries that are not to be stored in Logging. 
If - your project receives a large volume of logs, you might be able to use + your GCP resource receives a large volume of logs, you can use exclusions to reduce your chargeable logs. Exclusions are processed after log sinks, so you can export log entries before they are excluded. - Audit log entries and log entries from Amazon Web Services are never - excluded. + Note that organization-level and folder-level exclusions don't apply to + child resources, and that you can't exclude audit log entries. Attributes: @@ -1382,19 +1576,25 @@ Optional. A description of this exclusion. filter: Required. An `advanced logs filter - `__ that matches the log + `__ that matches the log entries to be excluded. By using the `sample function - `__, you can + `__, you can exclude less than 100% of the matching log entries. For - example, the following filter matches 99% of low-severity log - entries from load balancers: - ``"resource.type=http_load_balancer severity`__ to change the value of this field. + it does not exclude any log entries. You can [update an + exclusion][google.logging.v2.ConfigServiceV2.UpdateExclusion] + to change the value of this field. + create_time: + Output only. The creation timestamp of the exclusion. This + field may not be present for older exclusions. + update_time: + Output only. The last update timestamp of the exclusion. This + field may not be present for older exclusions. """, # @@protoc_insertion_point(class_scope:google.logging.v2.LogExclusion) ), @@ -1530,7 +1730,7 @@ Required. New values for the existing exclusion. Only the fields specified in ``update_mask`` are relevant. update_mask: - Required. A nonempty list of fields to change in the existing + Required. A non-empty list of fields to change in the existing exclusion. 
New values for the fields are taken from the corresponding fields in the [LogExclusion][google.logging.v2.LogExclusion] included in @@ -1580,9 +1780,11 @@ full_name="google.logging.v2.ConfigServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1656, - serialized_end=4876, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\302\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read" + ), + serialized_start=2036, + serialized_end=5482, methods=[ _descriptor.MethodDescriptor( name="ListSinks", diff --git a/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py index 6e93d39b46b4..b250dc7dec22 100644 --- a/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py +++ b/logging/google/cloud/logging_v2/proto/logging_config_pb2_grpc.py @@ -8,8 +8,7 @@ class ConfigServiceV2Stub(object): - """Service for configuring sinks used to export log entries out of - Logging. + """Service for configuring sinks used to route log entries. """ def __init__(self, channel): @@ -71,8 +70,7 @@ def __init__(self, channel): class ConfigServiceV2Servicer(object): - """Service for configuring sinks used to export log entries out of - Logging. + """Service for configuring sinks used to route log entries. """ def ListSinks(self, request, context): @@ -90,9 +88,9 @@ def GetSink(self, request, context): raise NotImplementedError("Method not implemented!") def CreateSink(self, request, context): - """Creates a sink that exports specified log entries to a destination. The + """Creates a sink that exports specified log entries to a destination. The export of newly-ingested log entries begins immediately, unless the sink's - `writer_identity` is not permitted to write to the destination. 
A sink can + `writer_identity` is not permitted to write to the destination. A sink can export log entries only from the resource owning the sink. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) @@ -100,8 +98,9 @@ def CreateSink(self, request, context): raise NotImplementedError("Method not implemented!") def UpdateSink(self, request, context): - """Updates a sink. This method replaces the following fields in the existing + """Updates a sink. This method replaces the following fields in the existing sink with values from the new sink: `destination`, and `filter`. + The updated sink might also have a new `writer_identity`; see the `unique_writer_identity` field. """ diff --git a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py index 200f2b381014..1addc0a0b592 100644 --- a/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py +++ b/logging/google/cloud/logging_v2/proto/logging_metrics_pb2.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/logging_v2/proto/logging_metrics.proto @@ -17,12 +15,13 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 from google.api import metric_pb2 as google_dot_api_dot_metric__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -33,15 +32,16 @@ "\n\025com.google.logging.v2B\023LoggingMetricsProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xb1\x03\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t2\xd4\x05\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}B\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + 
'\n3google/cloud/logging_v2/proto/logging_metrics.proto\x12\x11google.logging.v2\x1a\x1dgoogle/api/distribution.proto\x1a\x17google/api/metric.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"\x93\x04\n\tLogMetric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x03 \x01(\t\x12\x37\n\x11metric_descriptor\x18\x05 \x01(\x0b\x32\x1c.google.api.MetricDescriptor\x12\x17\n\x0fvalue_extractor\x18\x06 \x01(\t\x12K\n\x10label_extractors\x18\x07 \x03(\x0b\x32\x31.google.logging.v2.LogMetric.LabelExtractorsEntry\x12>\n\x0e\x62ucket_options\x18\x08 \x01(\x0b\x32&.google.api.Distribution.BucketOptions\x12/\n\x0b\x63reate_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x07version\x18\x04 \x01(\x0e\x32\'.google.logging.v2.LogMetric.ApiVersionB\x02\x18\x01\x1a\x36\n\x14LabelExtractorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x1c\n\nApiVersion\x12\x06\n\x02V2\x10\x00\x12\x06\n\x02V1\x10\x01"N\n\x15ListLogMetricsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"`\n\x16ListLogMetricsResponse\x12-\n\x07metrics\x18\x01 \x03(\x0b\x32\x1c.google.logging.v2.LogMetric\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"*\n\x13GetLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t"V\n\x16\x43reateLogMetricRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"[\n\x16UpdateLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\x12,\n\x06metric\x18\x02 \x01(\x0b\x32\x1c.google.logging.v2.LogMetric"-\n\x16\x44\x65leteLogMetricRequest\x12\x13\n\x0bmetric_name\x18\x01 
\x01(\t2\xe4\x07\n\x10MetricsServiceV2\x12\x8e\x01\n\x0eListLogMetrics\x12(.google.logging.v2.ListLogMetricsRequest\x1a).google.logging.v2.ListLogMetricsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v2/{parent=projects/*}/metrics\x12\x84\x01\n\x0cGetLogMetric\x12&.google.logging.v2.GetLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric".\x82\xd3\xe4\x93\x02(\x12&/v2/{metric_name=projects/*/metrics/*}\x12\x8b\x01\n\x0f\x43reateLogMetric\x12).google.logging.v2.CreateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"/\x82\xd3\xe4\x93\x02)"\x1f/v2/{parent=projects/*}/metrics:\x06metric\x12\x92\x01\n\x0fUpdateLogMetric\x12).google.logging.v2.UpdateLogMetricRequest\x1a\x1c.google.logging.v2.LogMetric"6\x82\xd3\xe4\x93\x02\x30\x1a&/v2/{metric_name=projects/*/metrics/*}:\x06metric\x12\x84\x01\n\x0f\x44\x65leteLogMetric\x12).google.logging.v2.DeleteLogMetricRequest\x1a\x16.google.protobuf.Empty".\x82\xd3\xe4\x93\x02(*&/v2/{metric_name=projects/*/metrics/*}\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x9f\x01\n\x15\x63om.google.logging.v2B\x13LoggingMetricsProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_distribution__pb2.DESCRIPTOR, google_dot_api_dot_metric__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -61,8 +61,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=662, - 
serialized_end=690, + serialized_start=785, + serialized_end=813, ) _sym_db.RegisterEnumDescriptor(_LOGMETRIC_APIVERSION) @@ -119,8 +119,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=606, - serialized_end=660, + serialized_start=729, + serialized_end=783, ) _LOGMETRIC = _descriptor.Descriptor( @@ -256,10 +256,46 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="create_time", + full_name="google.logging.v2.LogMetric.create_time", + index=7, + number=9, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_time", + full_name="google.logging.v2.LogMetric.update_time", + index=8, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="version", full_name="google.logging.v2.LogMetric.version", - index=7, + index=9, number=4, type=14, cpp_type=8, @@ -276,15 +312,15 @@ ), ], extensions=[], - nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY], - enum_types=[_LOGMETRIC_APIVERSION], + nested_types=[_LOGMETRIC_LABELEXTRACTORSENTRY,], + enum_types=[_LOGMETRIC_APIVERSION,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=257, - serialized_end=690, + serialized_start=282, + serialized_end=813, ) @@ -358,8 +394,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=692, - serialized_end=770, + serialized_start=815, + serialized_end=893, ) @@ -415,8 +451,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=772, - serialized_end=868, + 
serialized_start=895, + serialized_end=991, ) @@ -444,7 +480,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -454,8 +490,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=870, - serialized_end=912, + serialized_start=993, + serialized_end=1035, ) @@ -511,8 +547,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=914, - serialized_end=1000, + serialized_start=1037, + serialized_end=1123, ) @@ -568,8 +604,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1002, - serialized_end=1093, + serialized_start=1125, + serialized_end=1216, ) @@ -597,7 +633,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -607,8 +643,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1095, - serialized_end=1140, + serialized_start=1218, + serialized_end=1263, ) _LOGMETRIC_LABELEXTRACTORSENTRY.containing_type = _LOGMETRIC @@ -621,6 +657,12 @@ _LOGMETRIC.fields_by_name[ "bucket_options" ].message_type = google_dot_api_dot_distribution__pb2._DISTRIBUTION_BUCKETOPTIONS +_LOGMETRIC.fields_by_name[ + "create_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_LOGMETRIC.fields_by_name[ + "update_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP _LOGMETRIC.fields_by_name["version"].enum_type = _LOGMETRIC_APIVERSION _LOGMETRIC_APIVERSION.containing_type = _LOGMETRIC _LISTLOGMETRICSRESPONSE.fields_by_name["metrics"].message_type = _LOGMETRIC @@ -676,7 +718,8 @@ project/metrics/nginx%2Frequests"``. description: Optional. A description of this metric, which is used in - documentation. + documentation. The maximum length of the description is 8000 + characters. filter: Required. 
An `advanced logs filter `__ which is used to @@ -740,6 +783,12 @@ based metric is using a DISTRIBUTION value type and it describes the bucket boundaries used to create a histogram of the extracted values. + create_time: + Output only. The creation timestamp of the metric. This field + may not be present for older metrics. + update_time: + Output only. The last update timestamp of the metric. This + field may not be present for older metrics. version: Deprecated. The API version that created or updated this metric. The v2 format is used by default and cannot be @@ -899,9 +948,11 @@ full_name="google.logging.v2.MetricsServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1143, - serialized_end=1867, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" + ), + serialized_start=1266, + serialized_end=2262, methods=[ _descriptor.MethodDescriptor( name="ListLogMetrics", diff --git a/logging/google/cloud/logging_v2/proto/logging_pb2.py b/logging/google/cloud/logging_v2/proto/logging_pb2.py index 07c7a191fa2a..79a73bd0f5fc 100644 --- a/logging/google/cloud/logging_v2/proto/logging_pb2.py +++ b/logging/google/cloud/logging_v2/proto/logging_pb2.py @@ -1,6 +1,4 @@ # -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/logging_v2/proto/logging.proto @@ -17,17 +15,21 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import ( monitored_resource_pb2 as google_dot_api_dot_monitored__resource__pb2, ) from google.cloud.logging_v2.proto import ( log_entry_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2, ) +from google.cloud.logging_v2.proto import ( + logging_config_pb2 as google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2, +) from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -38,16 +40,18 @@ "\n\025com.google.logging.v2B\014LoggingProtoP\001Z8google.golang.org/genproto/googleapis/logging/v2;logging\370\001\001\252\002\027Google.Cloud.Logging.V2\312\002\027Google\\Cloud\\Logging\\V2" ), serialized_pb=_b( - '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a\x1cgoogle/api/annotations.proto\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 \x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 
\x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\xd8\x07\n\x10LoggingServiceV2\x12\xeb\x01\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xa0\x01\x82\xd3\xe4\x93\x02\x99\x01* 
/v2/{log_name=projects/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logsB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' + '\n+google/cloud/logging_v2/proto/logging.proto\x12\x11google.logging.v2\x1a#google/api/monitored_resource.proto\x1a-google/cloud/logging_v2/proto/log_entry.proto\x1a\x32google/cloud/logging_v2/proto/logging_config.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto"$\n\x10\x44\x65leteLogRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t"\xa9\x02\n\x16WriteLogEntriesRequest\x12\x10\n\x08log_name\x18\x01 \x01(\t\x12/\n\x08resource\x18\x02 \x01(\x0b\x32\x1d.google.api.MonitoredResource\x12\x45\n\x06labels\x18\x03 
\x03(\x0b\x32\x35.google.logging.v2.WriteLogEntriesRequest.LabelsEntry\x12,\n\x07\x65ntries\x18\x04 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fpartial_success\x18\x05 \x01(\x08\x12\x0f\n\x07\x64ry_run\x18\x06 \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x19\n\x17WriteLogEntriesResponse"\xc8\x01\n\x1cWriteLogEntriesPartialErrors\x12]\n\x10log_entry_errors\x18\x01 \x03(\x0b\x32\x43.google.logging.v2.WriteLogEntriesPartialErrors.LogEntryErrorsEntry\x1aI\n\x13LogEntryErrorsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12!\n\x05value\x18\x02 \x01(\x0b\x32\x12.google.rpc.Status:\x02\x38\x01"\x91\x01\n\x15ListLogEntriesRequest\x12\x17\n\x0bproject_ids\x18\x01 \x03(\tB\x02\x18\x01\x12\x16\n\x0eresource_names\x18\x08 \x03(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"_\n\x16ListLogEntriesResponse\x12,\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x1b.google.logging.v2.LogEntry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\'ListMonitoredResourceDescriptorsRequest\x12\x11\n\tpage_size\x18\x01 \x01(\x05\x12\x12\n\npage_token\x18\x02 \x01(\t"\x8a\x01\n(ListMonitoredResourceDescriptorsResponse\x12\x45\n\x14resource_descriptors\x18\x01 \x03(\x0b\x32\'.google.api.MonitoredResourceDescriptor\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fListLogsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t">\n\x10ListLogsResponse\x12\x11\n\tlog_names\x18\x03 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t2\x85\n\n\x10LoggingServiceV2\x12\x88\x02\n\tDeleteLog\x12#.google.logging.v2.DeleteLogRequest\x1a\x16.google.protobuf.Empty"\xbd\x01\x82\xd3\xe4\x93\x02\xb6\x01* 
/v2/{log_name=projects/*/logs/*}Z\x1b*\x19/v2/{log_name=*/*/logs/*}Z\'*%/v2/{log_name=organizations/*/logs/*}Z!*\x1f/v2/{log_name=folders/*/logs/*}Z)*\'/v2/{log_name=billingAccounts/*/logs/*}\x12\x86\x01\n\x0fWriteLogEntries\x12).google.logging.v2.WriteLogEntriesRequest\x1a*.google.logging.v2.WriteLogEntriesResponse"\x1c\x82\xd3\xe4\x93\x02\x16"\x11/v2/entries:write:\x01*\x12\x82\x01\n\x0eListLogEntries\x12(.google.logging.v2.ListLogEntriesRequest\x1a).google.logging.v2.ListLogEntriesResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x10/v2/entries:list:\x01*\x12\xc5\x01\n ListMonitoredResourceDescriptors\x12:.google.logging.v2.ListMonitoredResourceDescriptorsRequest\x1a;.google.logging.v2.ListMonitoredResourceDescriptorsResponse"(\x82\xd3\xe4\x93\x02"\x12 /v2/monitoredResourceDescriptors\x12\xff\x01\n\x08ListLogs\x12".google.logging.v2.ListLogsRequest\x1a#.google.logging.v2.ListLogsResponse"\xa9\x01\x82\xd3\xe4\x93\x02\xa2\x01\x12\x15/v2/{parent=*/*}/logsZ\x1e\x12\x1c/v2/{parent=projects/*}/logsZ#\x12!/v2/{parent=organizations/*}/logsZ\x1d\x12\x1b/v2/{parent=folders/*}/logsZ%\x12#/v2/{parent=billingAccounts/*}/logs\x1a\x8d\x02\xca\x41\x16logging.googleapis.com\xd2\x41\xf0\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.writeB\x98\x01\n\x15\x63om.google.logging.v2B\x0cLoggingProtoP\x01Z8google.golang.org/genproto/googleapis/logging/v2;logging\xf8\x01\x01\xaa\x02\x17Google.Cloud.Logging.V2\xca\x02\x17Google\\Cloud\\Logging\\V2b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_monitored__resource__pb2.DESCRIPTOR, google_dot_cloud_dot_logging__v2_dot_proto_dot_log__entry__pb2.DESCRIPTOR, + google_dot_cloud_dot_logging__v2_dot_proto_dot_logging__config__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -76,7 +80,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -86,8 +90,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=299, - serialized_end=335, + serialized_start=376, + serialized_end=412, ) @@ -143,8 +147,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=590, - serialized_end=635, + serialized_start=667, + serialized_end=712, ) _WRITELOGENTRIESREQUEST = _descriptor.Descriptor( @@ -264,15 +268,15 @@ ), ], extensions=[], - nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY], + nested_types=[_WRITELOGENTRIESREQUEST_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=338, - serialized_end=635, + serialized_start=415, + serialized_end=712, ) @@ -291,8 +295,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=637, - serialized_end=662, + serialized_start=714, + serialized_end=739, ) @@ -348,8 +352,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=792, - serialized_end=865, + serialized_start=869, + serialized_end=942, ) _WRITELOGENTRIESPARTIALERRORS = _descriptor.Descriptor( @@ -376,18 +380,18 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY], + nested_types=[_WRITELOGENTRIESPARTIALERRORS_LOGENTRYERRORSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=665, - serialized_end=865, + serialized_start=742, + serialized_end=942, ) @@ -515,8 +519,8 @@ syntax="proto3", extension_ranges=[], 
oneofs=[], - serialized_start=868, - serialized_end=1013, + serialized_start=945, + serialized_end=1090, ) @@ -572,8 +576,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1015, - serialized_end=1110, + serialized_start=1092, + serialized_end=1187, ) @@ -629,8 +633,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1112, - serialized_end=1192, + serialized_start=1189, + serialized_end=1269, ) @@ -686,8 +690,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1195, - serialized_end=1333, + serialized_start=1272, + serialized_end=1410, ) @@ -761,8 +765,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1335, - serialized_end=1407, + serialized_start=1412, + serialized_end=1484, ) @@ -818,8 +822,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1409, - serialized_end=1471, + serialized_start=1486, + serialized_end=1548, ) _WRITELOGENTRIESREQUEST_LABELSENTRY.containing_type = _WRITELOGENTRIESREQUEST @@ -960,7 +964,7 @@ `logs retention period `__ in the past or more than 24 hours in the future will not be available when calling ``entries.list``. However, those log entries can still - be exported with `LogSinks `__. To improve throughput and to avoid exceeding the `quota limit `__ for calls to ``entries.write``, you should try to include several log @@ -1044,9 +1048,7 @@ project_ids: Deprecated. Use ``resource_names`` instead. One or more project identifiers or project numbers from which to retrieve - log entries. Example: ``"my-project-1A"``. If present, these - project identifiers are converted to resource name format and - added to the list of resources in ``resource_names``. + log entries. Example: ``"my-project-1A"``. resource_names: Required. Names of one or more parent resources from which to retrieve log entries: :: "projects/[PROJECT_ID]" @@ -1216,8 +1218,8 @@ Attributes: log_names: A list of log names. 
For example, ``"projects/my- - project/syslog"`` or ``"organizations/123/cloudresourcemanager - .googleapis.com%2Factivity"``. + project/logs/syslog"`` or ``"organizations/123/logs/cloudresou + rcemanager.googleapis.com%2Factivity"``. next_page_token: If there might be more results than those appearing in this response, then ``nextPageToken`` is included. To get the next @@ -1240,9 +1242,11 @@ full_name="google.logging.v2.LoggingServiceV2", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1474, - serialized_end=2458, + serialized_options=_b( + "\312A\026logging.googleapis.com\322A\360\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-platform.read-only,https://www.googleapis.com/auth/logging.admin,https://www.googleapis.com/auth/logging.read,https://www.googleapis.com/auth/logging.write" + ), + serialized_start=1551, + serialized_end=2836, methods=[ _descriptor.MethodDescriptor( name="DeleteLog", @@ -1252,7 +1256,7 @@ input_type=_DELETELOGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002\231\001* /v2/{log_name=projects/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" + "\202\323\344\223\002\266\001* /v2/{log_name=projects/*/logs/*}Z\033*\031/v2/{log_name=*/*/logs/*}Z'*%/v2/{log_name=organizations/*/logs/*}Z!*\037/v2/{log_name=folders/*/logs/*}Z)*'/v2/{log_name=billingAccounts/*/logs/*}" ), ), _descriptor.MethodDescriptor( diff --git a/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py b/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py index d67dd2c95fd0..2a2b3656925c 100644 --- a/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py +++ b/logging/google/cloud/logging_v2/proto/logging_pb2_grpc.py @@ -72,9 +72,9 @@ def WriteLogEntries(self, request, context): raise NotImplementedError("Method not implemented!") def ListLogEntries(self, request, 
context): - """Lists log entries. Use this method to retrieve log entries from - Logging. For ways to export log entries, see - [Exporting Logs](/logging/docs/export). + """Lists log entries. Use this method to retrieve log entries that originated + from a project/folder/organization/billing account. For ways to export log + entries, see [Exporting Logs](/logging/docs/export). """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/logging/setup.py b/logging/setup.py index 6d3da8a110f9..00c9cb388a12 100644 --- a/logging/setup.py +++ b/logging/setup.py @@ -22,15 +22,15 @@ name = 'google-cloud-logging' description = 'Stackdriver Logging API client library' -version = '1.12.1' +version = '1.14.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 5 - Production/Stable' dependencies = [ - 'google-api-core[grpc] >= 1.14.0, < 2.0.0dev', - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } diff --git a/logging/synth.metadata b/logging/synth.metadata index d9ea35fc2dd8..44367f98f48d 100644 --- a/logging/synth.metadata +++ b/logging/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-09-23T16:17:44.431083Z", + "updateTime": "2019-10-29T12:31:03.926658Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.0", - "dockerImage": "googleapis/artman@sha256:0f66008f69061ea6d41499e2a34da3fc64fc7c9798077e3a37158653a135d801" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "999d0930cea7a7cb3147a7c5432e1f011060d549", - "internalRef": "270363949" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + 
"internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/logging/synth.py b/logging/synth.py index 43bd1b7bbe50..4364f387b4c9 100644 --- a/logging/synth.py +++ b/logging/synth.py @@ -30,9 +30,13 @@ include_protos=True, ) -s.move(library / "google/cloud/logging_v2/proto") +# the structure of the logging directory is a bit different, so manually copy the protos +s.move(library / "google/cloud/logging_v2/proto/cloud/logging_v2/proto", "google/cloud/logging_v2/proto") +s.move(library / "google/cloud/logging_v2/proto/*.proto") + s.move(library / "google/cloud/logging_v2/gapic") s.move(library / "tests/unit/gapic/v2") +s.move(library / "docs/gapic/v2") # ---------------------------------------------------------------------------- # Add templated files diff --git a/logging/tests/unit/handlers/transports/test_background_thread.py b/logging/tests/unit/handlers/transports/test_background_thread.py index 7de912560aa7..7edae8a7bfa0 100644 --- a/logging/tests/unit/handlers/transports/test_background_thread.py +++ b/logging/tests/unit/handlers/transports/test_background_thread.py @@ -43,7 +43,7 @@ def test_constructor(self): transport, worker = self._make_one(client, name) - logger, = worker.call_args[0] # call_args[0] is *args. + (logger,) = worker.call_args[0] # call_args[0] is *args. 
self.assertEqual(logger.name, name) def test_send(self): diff --git a/logging/tests/unit/test_client.py b/logging/tests/unit/test_client.py index 5acd736185fc..4e0b5ca22f0d 100644 --- a/logging/tests/unit/test_client.py +++ b/logging/tests/unit/test_client.py @@ -676,7 +676,7 @@ def test_setup_logging(self): self.assertEqual(len(mocked.mock_calls), 1) _, args, kwargs = mocked.mock_calls[0] - handler, = args + (handler,) = args self.assertIsInstance(handler, CloudLoggingHandler) handler.transport.worker.stop() @@ -710,7 +710,7 @@ def test_setup_logging_w_extra_kwargs(self): self.assertEqual(len(mocked.mock_calls), 1) _, args, kwargs = mocked.mock_calls[0] - handler, = args + (handler,) = args self.assertIsInstance(handler, CloudLoggingHandler) self.assertEqual(handler.name, name) self.assertEqual(handler.resource, resource) diff --git a/monitoring/tests/system/test_vpcsc.py b/monitoring/tests/system/test_vpcsc_v3.py similarity index 71% rename from monitoring/tests/system/test_vpcsc.py rename to monitoring/tests/system/test_vpcsc_v3.py index ffc78cbfc047..6ef514fa2891 100644 --- a/monitoring/tests/system/test_vpcsc.py +++ b/monitoring/tests/system/test_vpcsc_v3.py @@ -1,12 +1,17 @@ # -*- coding: utf-8 -*- # +# flake8: noqa +# +# DO NOT MODIFY! THIS FILE IS AUTO-GENERATED. +# This file is auto-generated on 11 Oct 19 21:43 UTC. + # Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,22 +19,14 @@ # See the License for the specific language governing permissions and # limitations under the License. - -# DO NOT MODIFY! AUTO-GENERATED! -# This file is auto-generated on 2019-05-03. 
- -# flake8: noqa - import os import pytest - +import logging from google.api_core import exceptions from google.cloud import monitoring_v3 from google.cloud.monitoring_v3 import enums PROJECT_INSIDE = os.environ.get("PROJECT_ID", None) -if not PROJECT_INSIDE: - PROJECT_INSIDE = None PROJECT_OUTSIDE = os.environ.get( "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", None ) @@ -39,6 +36,7 @@ class TestVPCServiceControlV3(object): @staticmethod def _is_rejected(call): + logger = logging.getLogger(__name__) try: responses = call() @@ -47,8 +45,10 @@ def _is_rejected(call): # instance, or None. list(responses) except exceptions.PermissionDenied as e: + logger.debug(e) return e.message == "Request is prohibited by organization's policy" - except: + except Exception as e: + logger.debug(e) pass return False @@ -62,10 +62,10 @@ def _do_test(delayed_inside, delayed_outside): assert TestVPCServiceControlV3._is_rejected(delayed_inside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_alert_policy(self): @@ -74,13 +74,13 @@ def test_create_alert_policy(self): delayed_inside = lambda: client.create_alert_policy(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_alert_policy(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: 
GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_alert_policy(self): @@ -89,13 +89,13 @@ def test_delete_alert_policy(self): delayed_inside = lambda: client.delete_alert_policy(name_inside) name_outside = client.alert_policy_path(PROJECT_OUTSIDE, "mock_alert_policy") delayed_outside = lambda: client.delete_alert_policy(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_alert_policy(self): @@ -104,13 +104,13 @@ def test_get_alert_policy(self): delayed_inside = lambda: client.get_alert_policy(name_inside) name_outside = client.alert_policy_path(PROJECT_OUTSIDE, "mock_alert_policy") delayed_outside = lambda: client.get_alert_policy(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_alert_policies(self): @@ -119,13 +119,13 @@ def test_list_alert_policies(self): delayed_inside = lambda: client.list_alert_policies(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_alert_policies(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, 
reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_update_alert_policy(self): @@ -134,13 +134,13 @@ def test_update_alert_policy(self): delayed_inside = lambda: client.update_alert_policy({"name": name_inside}) name_outside = client.alert_policy_path(PROJECT_OUTSIDE, "mock_alert_policy") delayed_outside = lambda: client.update_alert_policy({"name": name_outside}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_group(self): @@ -149,13 +149,13 @@ def test_create_group(self): delayed_inside = lambda: client.create_group(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_group(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_group(self): @@ -164,13 +164,13 @@ def test_delete_group(self): delayed_inside = lambda: client.delete_group(name_inside) name_outside = client.group_path(PROJECT_OUTSIDE, "mock_group") delayed_outside = 
lambda: client.delete_group(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_group(self): @@ -179,28 +179,28 @@ def test_get_group(self): delayed_inside = lambda: client.get_group(name_inside) name_outside = client.group_path(PROJECT_OUTSIDE, "mock_group") delayed_outside = lambda: client.get_group(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_group_members(self): client = monitoring_v3.GroupServiceClient() - name_inside = client.project_path(PROJECT_INSIDE) + name_inside = client.group_path(PROJECT_INSIDE, "mock_group") delayed_inside = lambda: client.list_group_members(name_inside) - name_outside = client.project_path(PROJECT_OUTSIDE) + name_outside = client.group_path(PROJECT_OUTSIDE, "mock_group") delayed_outside = lambda: client.list_group_members(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, 
reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_groups(self): @@ -209,13 +209,13 @@ def test_list_groups(self): delayed_inside = lambda: client.list_groups(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_groups(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_update_group(self): @@ -224,13 +224,13 @@ def test_update_group(self): delayed_inside = lambda: client.update_group({"name": name_inside}) name_outside = client.group_path(PROJECT_OUTSIDE, "mock_group") delayed_outside = lambda: client.update_group({"name": name_outside}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_metric_descriptor(self): @@ -239,13 +239,13 @@ def test_create_metric_descriptor(self): delayed_inside = lambda: client.create_metric_descriptor(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_metric_descriptor(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, 
reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_time_series(self): @@ -254,13 +254,13 @@ def test_create_time_series(self): delayed_inside = lambda: client.create_time_series(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_time_series(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_metric_descriptor(self): @@ -273,13 +273,13 @@ def test_delete_metric_descriptor(self): PROJECT_OUTSIDE, "mock_metric_descriptor" ) delayed_outside = lambda: client.delete_metric_descriptor(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_metric_descriptor(self): @@ -292,13 +292,13 @@ def test_get_metric_descriptor(self): PROJECT_OUTSIDE, "mock_metric_descriptor" ) delayed_outside = lambda: client.get_metric_descriptor(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + 
self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_monitored_resource_descriptor(self): @@ -311,13 +311,13 @@ def test_get_monitored_resource_descriptor(self): PROJECT_OUTSIDE, "mock_monitored_resource_descriptor" ) delayed_outside = lambda: client.get_monitored_resource_descriptor(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_metric_descriptors(self): @@ -326,13 +326,13 @@ def test_list_metric_descriptors(self): delayed_inside = lambda: client.list_metric_descriptors(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_metric_descriptors(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_monitored_resource_descriptors(self): @@ -343,13 +343,13 @@ def test_list_monitored_resource_descriptors(self): delayed_outside = lambda: 
client.list_monitored_resource_descriptors( name_outside ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_time_series(self): @@ -362,13 +362,13 @@ def test_list_time_series(self): delayed_outside = lambda: client.list_time_series( name_outside, "", {}, enums.ListTimeSeriesRequest.TimeSeriesView.FULL ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_notification_channel(self): @@ -377,13 +377,13 @@ def test_create_notification_channel(self): delayed_inside = lambda: client.create_notification_channel(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_notification_channel(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_notification_channel(self): @@ -396,13 
+396,13 @@ def test_delete_notification_channel(self): PROJECT_OUTSIDE, "mock_notification_channel" ) delayed_outside = lambda: client.delete_notification_channel(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_notification_channel(self): @@ -415,13 +415,13 @@ def test_get_notification_channel(self): PROJECT_OUTSIDE, "mock_notification_channel" ) delayed_outside = lambda: client.get_notification_channel(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_notification_channel_descriptor(self): @@ -436,13 +436,36 @@ def test_get_notification_channel_descriptor(self): delayed_outside = lambda: client.get_notification_channel_descriptor( name_outside ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" + ) + @pytest.mark.skipif( + not PROJECT_OUTSIDE, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_get_notification_channel_verification_code(self): + client = monitoring_v3.NotificationChannelServiceClient() + 
name_inside = client.notification_channel_path( + PROJECT_INSIDE, "mock_notification_channel" + ) + delayed_inside = lambda: client.get_notification_channel_verification_code( + name_inside + ) + name_outside = client.notification_channel_path( + PROJECT_OUTSIDE, "mock_notification_channel" + ) + delayed_outside = lambda: client.get_notification_channel_verification_code( + name_outside + ) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_notification_channel_descriptors(self): @@ -455,13 +478,13 @@ def test_list_notification_channel_descriptors(self): delayed_outside = lambda: client.list_notification_channel_descriptors( name_outside ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_notification_channels(self): @@ -470,13 +493,36 @@ def test_list_notification_channels(self): delayed_inside = lambda: client.list_notification_channels(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_notification_channels(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, 
reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_send_notification_channel_verification_code(self): + client = monitoring_v3.NotificationChannelServiceClient() + name_inside = client.notification_channel_path( + PROJECT_INSIDE, "mock_notification_channel" + ) + delayed_inside = lambda: client.send_notification_channel_verification_code( + name_inside + ) + name_outside = client.notification_channel_path( + PROJECT_OUTSIDE, "mock_notification_channel" + ) + delayed_outside = lambda: client.send_notification_channel_verification_code( + name_outside + ) + self._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" + ) + @pytest.mark.skipif( + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_update_notification_channel(self): @@ -493,13 +539,32 @@ def test_update_notification_channel(self): delayed_outside = lambda: client.update_notification_channel( {"name": name_outside} ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" + ) + @pytest.mark.skipif( + not PROJECT_OUTSIDE, + reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", + ) + def test_verify_notification_channel(self): + client = monitoring_v3.NotificationChannelServiceClient() + name_inside = client.notification_channel_path( + PROJECT_INSIDE, "mock_notification_channel" + ) + delayed_inside = lambda: client.verify_notification_channel(name_inside, "") + name_outside = client.notification_channel_path( + PROJECT_OUTSIDE, "mock_notification_channel" + ) + delayed_outside = 
lambda: client.verify_notification_channel(name_outside, "") + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_create_uptime_check_config(self): @@ -508,13 +573,13 @@ def test_create_uptime_check_config(self): delayed_inside = lambda: client.create_uptime_check_config(name_inside, {}) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.create_uptime_check_config(name_outside, {}) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_delete_uptime_check_config(self): @@ -527,13 +592,13 @@ def test_delete_uptime_check_config(self): PROJECT_OUTSIDE, "mock_uptime_check_config" ) delayed_outside = lambda: client.delete_uptime_check_config(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_get_uptime_check_config(self): @@ -546,13 +611,13 @@ def test_get_uptime_check_config(self): 
PROJECT_OUTSIDE, "mock_uptime_check_config" ) delayed_outside = lambda: client.get_uptime_check_config(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_list_uptime_check_configs(self): @@ -561,13 +626,13 @@ def test_list_uptime_check_configs(self): delayed_inside = lambda: client.list_uptime_check_configs(name_inside) name_outside = client.project_path(PROJECT_OUTSIDE) delayed_outside = lambda: client.list_uptime_check_configs(name_outside) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) @pytest.mark.skipif( - PROJECT_INSIDE is None, reason="Missing environment variable: PROJECT_ID" + not PROJECT_INSIDE, reason="Missing environment variable: PROJECT_ID" ) @pytest.mark.skipif( - PROJECT_OUTSIDE is None, + not PROJECT_OUTSIDE, reason="Missing environment variable: GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT", ) def test_update_uptime_check_config(self): @@ -584,4 +649,4 @@ def test_update_uptime_check_config(self): delayed_outside = lambda: client.update_uptime_check_config( {"name": name_outside} ) - TestVPCServiceControlV3._do_test(delayed_inside, delayed_outside) + self._do_test(delayed_inside, delayed_outside) diff --git a/noxfile.py b/noxfile.py deleted file mode 100644 index 573360929b63..000000000000 --- a/noxfile.py +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -import nox - - -@nox.session(python='3.6') -def docs(session): - """Build the docs.""" - - # Install Sphinx and also all of the google-cloud-* packages. - session.chdir(os.path.realpath(os.path.dirname(__file__))) - session.install('-r', os.path.join('docs', 'requirements.txt')) - - # Build the docs! - session.run( - 'bash', os.path.join('.', 'test_utils', 'scripts', 'update_docs.sh')) - - -@nox.session(python='3.6') -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - - session.install('docutils', 'Pygments') - session.run( - 'python', 'legacy/google-cloud/setup.py', 'check', '--restructuredtext', '--strict') diff --git a/oslogin/docs/conf.py b/oslogin/docs/conf.py index f44d9a3fe9ea..b36144820e0c 100644 --- a/oslogin/docs/conf.py +++ b/oslogin/docs/conf.py @@ -45,6 +45,7 @@ autodoc_default_flags = ["members"] autosummary_generate = True + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] @@ -121,6 +122,7 @@ # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True + # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. 
See the documentation for @@ -229,6 +231,7 @@ # -- Options for warnings ------------------------------------------------------ + suppress_warnings = [ # Temporarily suppress this to avoid "more than one target found for # cross-reference" warning, which are intractable for us to avoid while in @@ -284,6 +287,7 @@ # If false, no module index is generated. # latex_domain_indices = True + # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples @@ -301,6 +305,7 @@ # If true, show URL addresses after external links. # man_show_urls = False + # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples @@ -330,6 +335,7 @@ # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False + # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), @@ -338,11 +344,12 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } + # Napoleon settings napoleon_google_docstring = True napoleon_numpy_docstring = True diff --git a/oslogin/docs/gapic/v1/api.rst b/oslogin/docs/gapic/v1/api.rst index 29e3db95c426..25108a16994c 100644 --- a/oslogin/docs/gapic/v1/api.rst +++ b/oslogin/docs/gapic/v1/api.rst @@ -1,5 +1,5 @@ -Client for Google Cloud OS Login API -==================================== +Client for Cloud OS Login API +============================= .. 
automodule:: google.cloud.oslogin_v1 :members: diff --git a/oslogin/docs/gapic/v1/types.rst b/oslogin/docs/gapic/v1/types.rst index 25d3fd0f8a94..9694a7e3719f 100644 --- a/oslogin/docs/gapic/v1/types.rst +++ b/oslogin/docs/gapic/v1/types.rst @@ -1,5 +1,5 @@ -Types for Google Cloud OS Login API Client -========================================== +Types for Cloud OS Login API Client +=================================== .. automodule:: google.cloud.oslogin_v1.types :members: \ No newline at end of file diff --git a/oslogin/google/__init__.py b/oslogin/google/__init__.py index dd3a9f485275..8fcc60e2b9c6 100644 --- a/oslogin/google/__init__.py +++ b/oslogin/google/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/oslogin/google/cloud/__init__.py b/oslogin/google/cloud/__init__.py index dd3a9f485275..8fcc60e2b9c6 100644 --- a/oslogin/google/cloud/__init__.py +++ b/oslogin/google/cloud/__init__.py @@ -1,4 +1,6 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/oslogin/google/cloud/oslogin.py b/oslogin/google/cloud/oslogin.py index 3c44a48fc35c..416aad010a9b 100644 --- a/oslogin/google/cloud/oslogin.py +++ b/oslogin/google/cloud/oslogin.py @@ -1,4 +1,6 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,9 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ from __future__ import absolute_import from google.cloud.oslogin_v1 import OsLoginServiceClient +from google.cloud.oslogin_v1 import enums from google.cloud.oslogin_v1 import types -__all__ = ("types", "OsLoginServiceClient") + +__all__ = ("enums", "types", "OsLoginServiceClient") diff --git a/oslogin/google/cloud/oslogin_v1/__init__.py b/oslogin/google/cloud/oslogin_v1/__init__.py index 2691d28a9b40..83a5ac263537 100644 --- a/oslogin/google/cloud/oslogin_v1/__init__.py +++ b/oslogin/google/cloud/oslogin_v1/__init__.py @@ -18,11 +18,13 @@ from __future__ import absolute_import from google.cloud.oslogin_v1 import types +from google.cloud.oslogin_v1.gapic import enums from google.cloud.oslogin_v1.gapic import os_login_service_client class OsLoginServiceClient(os_login_service_client.OsLoginServiceClient): __doc__ = os_login_service_client.OsLoginServiceClient.__doc__ + enums = enums -__all__ = ("types", "OsLoginServiceClient") +__all__ = ("enums", "types", "OsLoginServiceClient") diff --git a/oslogin/google/cloud/oslogin_v1/gapic/enums.py b/oslogin/google/cloud/oslogin_v1/gapic/enums.py new file mode 100644 index 000000000000..ab555b005071 --- /dev/null +++ b/oslogin/google/cloud/oslogin_v1/gapic/enums.py @@ -0,0 +1,35 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class OperatingSystemType(enum.IntEnum): + """ + The operating system options for account entries. + + Attributes: + OPERATING_SYSTEM_TYPE_UNSPECIFIED (int): The operating system type associated with the user account information is + unspecified. + LINUX (int): Linux user account information. + WINDOWS (int): Windows user account information. + """ + + OPERATING_SYSTEM_TYPE_UNSPECIFIED = 0 + LINUX = 1 + WINDOWS = 2 diff --git a/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py b/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py index d19ee1de971a..ec496c53a918 100644 --- a/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py +++ b/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client.py @@ -29,6 +29,7 @@ import google.api_core.path_template import grpc +from google.cloud.oslogin_v1.gapic import enums from google.cloud.oslogin_v1.gapic import os_login_service_client_config from google.cloud.oslogin_v1.gapic.transports import os_login_service_grpc_transport from google.cloud.oslogin_v1.proto import common_pb2 @@ -77,19 +78,19 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def fingerprint_path(cls, user, fingerprint): - """Return a fully-qualified fingerprint string.""" + def posix_account_path(cls, user, project): + """Return a fully-qualified posix_account string.""" return google.api_core.path_template.expand( - "users/{user}/sshPublicKeys/{fingerprint}", - user=user, - fingerprint=fingerprint, + "users/{user}/projects/{project}", user=user, project=project ) @classmethod - def project_path(cls, user, project): - """Return a fully-qualified project string.""" + def ssh_public_key_path(cls, user, fingerprint): + """Return a fully-qualified ssh_public_key string.""" return google.api_core.path_template.expand( - "users/{user}/projects/{project}", user=user, 
project=project + "users/{user}/sshPublicKeys/{fingerprint}", + user=user, + fingerprint=fingerprint, ) @classmethod @@ -225,12 +226,12 @@ def delete_posix_account( >>> >>> client = oslogin_v1.OsLoginServiceClient() >>> - >>> name = client.project_path('[USER]', '[PROJECT]') + >>> name = client.posix_account_path('[USER]', '[PROJECT]') >>> >>> client.delete_posix_account(name) Args: - name (str): A reference to the POSIX account to update. POSIX accounts are + name (str): Required. A reference to the POSIX account to update. POSIX accounts are identified by the project ID they are associated with. A reference to the POSIX account is in format ``users/{user}/projects/{project}``. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -293,14 +294,14 @@ def delete_ssh_public_key( >>> >>> client = oslogin_v1.OsLoginServiceClient() >>> - >>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]') + >>> name = client.ssh_public_key_path('[USER]', '[FINGERPRINT]') >>> >>> client.delete_ssh_public_key(name) Args: - name (str): The fingerprint of the public key to update. Public keys are identified - by their SHA-256 fingerprint. The fingerprint of the public key is in - format ``users/{user}/sshPublicKeys/{fingerprint}``. + name (str): Required. The fingerprint of the public key to update. Public keys are + identified by their SHA-256 fingerprint. The fingerprint of the public + key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -349,6 +350,8 @@ def delete_ssh_public_key( def get_login_profile( self, name, + project_id=None, + system_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -367,7 +370,9 @@ def get_login_profile( >>> response = client.get_login_profile(name) Args: - name (str): The unique ID for the user in format ``users/{user}``. + name (str): Required. The unique ID for the user in format ``users/{user}``. + project_id (str): The project ID of the Google Cloud Platform project. + system_id (str): A system ID for filtering the results of the request. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -398,7 +403,9 @@ def get_login_profile( client_info=self._client_info, ) - request = oslogin_pb2.GetLoginProfileRequest(name=name) + request = oslogin_pb2.GetLoginProfileRequest( + name=name, project_id=project_id, system_id=system_id + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -431,12 +438,12 @@ def get_ssh_public_key( >>> >>> client = oslogin_v1.OsLoginServiceClient() >>> - >>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]') + >>> name = client.ssh_public_key_path('[USER]', '[FINGERPRINT]') >>> >>> response = client.get_ssh_public_key(name) Args: - name (str): The fingerprint of the public key to retrieve. Public keys are + name (str): Required. The fingerprint of the public key to retrieve. Public keys are identified by their SHA-256 fingerprint. The fingerprint of the public key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. 
retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -490,7 +497,7 @@ def get_ssh_public_key( def import_ssh_public_key( self, parent, - ssh_public_key, + ssh_public_key=None, project_id=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, @@ -508,14 +515,11 @@ def import_ssh_public_key( >>> >>> parent = client.user_path('[USER]') >>> - >>> # TODO: Initialize `ssh_public_key`: - >>> ssh_public_key = {} - >>> - >>> response = client.import_ssh_public_key(parent, ssh_public_key) + >>> response = client.import_ssh_public_key(parent) Args: - parent (str): The unique ID for the user in format ``users/{user}``. - ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time. + parent (str): Required. The unique ID for the user in format ``users/{user}``. + ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): Optional. The SSH public key and expiration time. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.oslogin_v1.types.SshPublicKey` @@ -588,7 +592,7 @@ def update_ssh_public_key( >>> >>> client = oslogin_v1.OsLoginServiceClient() >>> - >>> name = client.fingerprint_path('[USER]', '[FINGERPRINT]') + >>> name = client.ssh_public_key_path('[USER]', '[FINGERPRINT]') >>> >>> # TODO: Initialize `ssh_public_key`: >>> ssh_public_key = {} @@ -596,10 +600,10 @@ def update_ssh_public_key( >>> response = client.update_ssh_public_key(name, ssh_public_key) Args: - name (str): The fingerprint of the public key to update. Public keys are identified - by their SHA-256 fingerprint. The fingerprint of the public key is in - format ``users/{user}/sshPublicKeys/{fingerprint}``. - ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): The SSH public key and expiration time. + name (str): Required. The fingerprint of the public key to update. 
Public keys are + identified by their SHA-256 fingerprint. The fingerprint of the public + key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. + ssh_public_key (Union[dict, ~google.cloud.oslogin_v1.types.SshPublicKey]): Required. The SSH public key and expiration time. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.oslogin_v1.types.SshPublicKey` diff --git a/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client_config.py b/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client_config.py index e55e7963358b..6d64718728d7 100644 --- a/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client_config.py +++ b/oslogin/google/cloud/oslogin_v1/gapic/os_login_service_client_config.py @@ -10,41 +10,41 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 10000, + "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 10000, + "max_rpc_timeout_millis": 20000, "total_timeout_millis": 600000, } }, "methods": { "DeletePosixAccount": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "DeleteSshPublicKey": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "GetLoginProfile": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "GetSshPublicKey": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "ImportSshPublicKey": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateSshPublicKey": { - 
"timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, }, diff --git a/oslogin/google/cloud/oslogin_v1/gapic/transports/os_login_service_grpc_transport.py b/oslogin/google/cloud/oslogin_v1/gapic/transports/os_login_service_grpc_transport.py index 6414cf27a0da..2860dfe42952 100644 --- a/oslogin/google/cloud/oslogin_v1/gapic/transports/os_login_service_grpc_transport.py +++ b/oslogin/google/cloud/oslogin_v1/gapic/transports/os_login_service_grpc_transport.py @@ -33,9 +33,7 @@ class OsLoginServiceGrpcTransport(object): # in this service. _OAUTH_SCOPES = ( "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/compute.readonly", ) def __init__( diff --git a/oslogin/google/cloud/oslogin_v1/proto/common_pb2.py b/oslogin/google/cloud/oslogin_v1/proto/common_pb2.py index 7a81f9e4a322..c842ef79bc1d 100644 --- a/oslogin/google/cloud/oslogin_v1/proto/common_pb2.py +++ b/oslogin/google/cloud/oslogin_v1/proto/common_pb2.py @@ -1,32 +1,72 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/oslogin/common/common.proto import sys _b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/oslogin/common/common.proto", package="google.cloud.oslogin.common", syntax="proto3", + serialized_options=_b( + "\n\037com.google.cloud.oslogin.commonB\014OsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\252\002\033Google.Cloud.OsLogin.Common\312\002\033Google\\Cloud\\OsLogin\\Common\352A+\n\033oslogin.googleapis.com/User\022\014users/{user}" + ), serialized_pb=_b( - '\n(google/cloud/oslogin/common/common.proto\x12\x1bgoogle.cloud.oslogin.common\x1a\x1cgoogle/api/annotations.proto"\xa8\x01\n\x0cPosixAccount\x12\x0f\n\x07primary\x18\x01 \x01(\x08\x12\x10\n\x08username\x18\x02 \x01(\t\x12\x0b\n\x03uid\x18\x03 \x01(\x03\x12\x0b\n\x03gid\x18\x04 \x01(\x03\x12\x16\n\x0ehome_directory\x18\x05 \x01(\t\x12\r\n\x05shell\x18\x06 \x01(\t\x12\r\n\x05gecos\x18\x07 \x01(\t\x12\x11\n\tsystem_id\x18\x08 \x01(\t\x12\x12\n\naccount_id\x18\t \x01(\t"N\n\x0cSshPublicKey\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x14\x65xpiration_time_usec\x18\x02 \x01(\x03\x12\x13\n\x0b\x66ingerprint\x18\x03 
\x01(\tB\xae\x01\n\x1f\x63om.google.cloud.oslogin.commonB\x0cOsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\xaa\x02\x1bGoogle.Cloud.OsLogin.Common\xca\x02\x1bGoogle\\Cloud\\OsLogin\\Commonb\x06proto3' + '\n(google/cloud/oslogin/common/common.proto\x12\x1bgoogle.cloud.oslogin.common\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto"\xdc\x02\n\x0cPosixAccount\x12\x0f\n\x07primary\x18\x01 \x01(\x08\x12\x10\n\x08username\x18\x02 \x01(\t\x12\x0b\n\x03uid\x18\x03 \x01(\x03\x12\x0b\n\x03gid\x18\x04 \x01(\x03\x12\x16\n\x0ehome_directory\x18\x05 \x01(\t\x12\r\n\x05shell\x18\x06 \x01(\t\x12\r\n\x05gecos\x18\x07 \x01(\t\x12\x11\n\tsystem_id\x18\x08 \x01(\t\x12\x17\n\naccount_id\x18\t \x01(\tB\x03\xe0\x41\x03\x12O\n\x15operating_system_type\x18\n \x01(\x0e\x32\x30.google.cloud.oslogin.common.OperatingSystemType\x12\x11\n\x04name\x18\x0b \x01(\tB\x03\xe0\x41\x03:I\xea\x41\x46\n#oslogin.googleapis.com/PosixAccount\x12\x1fusers/{user}/projects/{project}"\xba\x01\n\x0cSshPublicKey\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x14\x65xpiration_time_usec\x18\x02 \x01(\x03\x12\x18\n\x0b\x66ingerprint\x18\x03 \x01(\tB\x03\xe0\x41\x03\x12\x11\n\x04name\x18\x04 \x01(\tB\x03\xe0\x41\x03:R\xea\x41O\n#oslogin.googleapis.com/SshPublicKey\x12(users/{user}/sshPublicKeys/{fingerprint}*T\n\x13OperatingSystemType\x12%\n!OPERATING_SYSTEM_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05LINUX\x10\x01\x12\x0b\n\x07WINDOWS\x10\x02\x42\xdc\x01\n\x1f\x63om.google.cloud.oslogin.commonB\x0cOsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\xaa\x02\x1bGoogle.Cloud.OsLogin.Common\xca\x02\x1bGoogle\\Cloud\\OsLogin\\Common\xea\x41+\n\x1boslogin.googleapis.com/User\x12\x0cusers/{user}b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + ], +) + +_OPERATINGSYSTEMTYPE = _descriptor.EnumDescriptor( + 
name="OperatingSystemType", + full_name="google.cloud.oslogin.common.OperatingSystemType", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="OPERATING_SYSTEM_TYPE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="LINUX", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="WINDOWS", index=2, number=2, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=673, + serialized_end=757, ) +_sym_db.RegisterEnumDescriptor(_OPERATINGSYSTEMTYPE) + +OperatingSystemType = enum_type_wrapper.EnumTypeWrapper(_OPERATINGSYSTEMTYPE) +OPERATING_SYSTEM_TYPE_UNSPECIFIED = 0 +LINUX = 1 +WINDOWS = 2 _POSIXACCOUNT = _descriptor.Descriptor( @@ -51,7 +91,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="username", @@ -68,7 +109,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="uid", @@ -85,7 +127,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="gid", @@ -102,7 +145,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="home_directory", @@ -119,7 +163,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="shell", @@ -136,7 +181,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( 
name="gecos", @@ -153,7 +199,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="system_id", @@ -170,7 +217,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="account_id", @@ -187,19 +235,58 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="operating_system_type", + full_name="google.cloud.oslogin.common.PosixAccount.operating_system_type", + index=9, + number=10, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.oslogin.common.PosixAccount.name", + index=10, + number=11, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=_b( + "\352AF\n#oslogin.googleapis.com/PosixAccount\022\037users/{user}/projects/{project}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=104, - serialized_end=272, + serialized_start=134, + serialized_end=482, ) @@ -225,7 +312,8 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="expiration_time_usec", @@ -242,7 +330,8 @@ containing_type=None, is_extension=False, 
extension_scope=None, - options=None, + serialized_options=None, + file=DESCRIPTOR, ), _descriptor.FieldDescriptor( name="fingerprint", @@ -259,23 +348,46 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.oslogin.common.SshPublicKey.name", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=_b( + "\352AO\n#oslogin.googleapis.com/SshPublicKey\022(users/{user}/sshPublicKeys/{fingerprint}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=274, - serialized_end=352, + serialized_start=485, + serialized_end=671, ) +_POSIXACCOUNT.fields_by_name["operating_system_type"].enum_type = _OPERATINGSYSTEMTYPE DESCRIPTOR.message_types_by_name["PosixAccount"] = _POSIXACCOUNT DESCRIPTOR.message_types_by_name["SshPublicKey"] = _SSHPUBLICKEY +DESCRIPTOR.enum_types_by_name["OperatingSystemType"] = _OPERATINGSYSTEMTYPE _sym_db.RegisterFileDescriptor(DESCRIPTOR) PosixAccount = _reflection.GeneratedProtocolMessageType( @@ -307,6 +419,10 @@ applies to. By default, the empty value is used. account_id: Output only. A POSIX account identifier. + operating_system_type: + The operating system type where this account applies. + name: + Output only. The canonical resource name. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.common.PosixAccount) ), @@ -329,6 +445,8 @@ An expiration time in microseconds since epoch. fingerprint: Output only. The SHA-256 fingerprint of the SSH public key. + name: + Output only. The canonical resource name. 
""", # @@protoc_insertion_point(class_scope:google.cloud.oslogin.common.SshPublicKey) ), @@ -336,11 +454,11 @@ _sym_db.RegisterMessage(SshPublicKey) -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions( - descriptor_pb2.FileOptions(), - _b( - "\n\037com.google.cloud.oslogin.commonB\014OsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\252\002\033Google.Cloud.OsLogin.Common\312\002\033Google\\Cloud\\OsLogin\\Common" - ), -) +DESCRIPTOR._options = None +_POSIXACCOUNT.fields_by_name["account_id"]._options = None +_POSIXACCOUNT.fields_by_name["name"]._options = None +_POSIXACCOUNT._options = None +_SSHPUBLICKEY.fields_by_name["fingerprint"]._options = None +_SSHPUBLICKEY.fields_by_name["name"]._options = None +_SSHPUBLICKEY._options = None # @@protoc_insertion_point(module_scope) diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin.proto b/oslogin/google/cloud/oslogin_v1/proto/oslogin.proto index d76bd47e650c..75d7c060518b 100644 --- a/oslogin/google/cloud/oslogin_v1/proto/oslogin.proto +++ b/oslogin/google/cloud/oslogin_v1/proto/oslogin.proto @@ -1,4 +1,4 @@ -// Copyright 2017 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,16 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
+// syntax = "proto3"; package google.cloud.oslogin.v1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/oslogin/common/common.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; @@ -33,20 +37,25 @@ option php_namespace = "Google\\Cloud\\OsLogin\\V1"; // The Cloud OS Login API allows you to manage users and their associated SSH // public keys for logging into virtual machines on Google Cloud Platform. service OsLoginService { + option (google.api.default_host) = "oslogin.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/compute"; + // Deletes a POSIX account. - rpc DeletePosixAccount(DeletePosixAccountRequest) - returns (google.protobuf.Empty) { + rpc DeletePosixAccount(DeletePosixAccountRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{name=users/*/projects/*}" }; + option (google.api.method_signature) = "name"; } // Deletes an SSH public key. - rpc DeleteSshPublicKey(DeleteSshPublicKeyRequest) - returns (google.protobuf.Empty) { + rpc DeleteSshPublicKey(DeleteSshPublicKeyRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{name=users/*/sshPublicKeys/*}" }; + option (google.api.method_signature) = "name"; } // Retrieves the profile information used for logging in to a virtual machine @@ -55,92 +64,122 @@ service OsLoginService { option (google.api.http) = { get: "/v1/{name=users/*}/loginProfile" }; + option (google.api.method_signature) = "name"; } // Retrieves an SSH public key. 
- rpc GetSshPublicKey(GetSshPublicKeyRequest) - returns (google.cloud.oslogin.common.SshPublicKey) { + rpc GetSshPublicKey(GetSshPublicKeyRequest) returns (google.cloud.oslogin.common.SshPublicKey) { option (google.api.http) = { get: "/v1/{name=users/*/sshPublicKeys/*}" }; + option (google.api.method_signature) = "name"; } // Adds an SSH public key and returns the profile information. Default POSIX // account information is set when no username and UID exist as part of the // login profile. - rpc ImportSshPublicKey(ImportSshPublicKeyRequest) - returns (ImportSshPublicKeyResponse) { + rpc ImportSshPublicKey(ImportSshPublicKeyRequest) returns (ImportSshPublicKeyResponse) { option (google.api.http) = { post: "/v1/{parent=users/*}:importSshPublicKey" body: "ssh_public_key" }; + option (google.api.method_signature) = "parent,ssh_public_key"; + option (google.api.method_signature) = "parent,ssh_public_key,project_id"; } // Updates an SSH public key and returns the profile information. This method // supports patch semantics. - rpc UpdateSshPublicKey(UpdateSshPublicKeyRequest) - returns (google.cloud.oslogin.common.SshPublicKey) { + rpc UpdateSshPublicKey(UpdateSshPublicKeyRequest) returns (google.cloud.oslogin.common.SshPublicKey) { option (google.api.http) = { patch: "/v1/{name=users/*/sshPublicKeys/*}" body: "ssh_public_key" }; + option (google.api.method_signature) = "name,ssh_public_key"; + option (google.api.method_signature) = "name,ssh_public_key,update_mask"; } } // The user profile information used for logging in to a virtual machine on // Google Compute Engine. message LoginProfile { - // The primary email address that uniquely identifies the user. - string name = 1; + // Required. A unique user ID. + string name = 1 [(google.api.field_behavior) = REQUIRED]; // The list of POSIX accounts associated with the user. repeated google.cloud.oslogin.common.PosixAccount posix_accounts = 2; // A map from SSH public key fingerprint to the associated key object. 
map ssh_public_keys = 3; - - // Indicates if the user is suspended. A suspended user cannot log in but - // their profile information is retained. - bool suspended = 4; } // A request message for deleting a POSIX account entry. message DeletePosixAccountRequest { - // A reference to the POSIX account to update. POSIX accounts are identified + // Required. A reference to the POSIX account to update. POSIX accounts are identified // by the project ID they are associated with. A reference to the POSIX // account is in format `users/{user}/projects/{project}`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "oslogin.googleapis.com/PosixAccount" + } + ]; } // A request message for deleting an SSH public key. message DeleteSshPublicKeyRequest { - // The fingerprint of the public key to update. Public keys are identified by + // Required. The fingerprint of the public key to update. Public keys are identified by // their SHA-256 fingerprint. The fingerprint of the public key is in format // `users/{user}/sshPublicKeys/{fingerprint}`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "oslogin.googleapis.com/SshPublicKey" + } + ]; } // A request message for retrieving the login profile information for a user. message GetLoginProfileRequest { - // The unique ID for the user in format `users/{user}`. - string name = 1; + // Required. The unique ID for the user in format `users/{user}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "oslogin.googleapis.com/PosixAccount" + } + ]; + + // The project ID of the Google Cloud Platform project. + string project_id = 2; + + // A system ID for filtering the results of the request. + string system_id = 3; } // A request message for retrieving an SSH public key. 
message GetSshPublicKeyRequest { - // The fingerprint of the public key to retrieve. Public keys are identified + // Required. The fingerprint of the public key to retrieve. Public keys are identified // by their SHA-256 fingerprint. The fingerprint of the public key is in // format `users/{user}/sshPublicKeys/{fingerprint}`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "oslogin.googleapis.com/SshPublicKey" + } + ]; } // A request message for importing an SSH public key. message ImportSshPublicKeyRequest { - // The unique ID for the user in format `users/{user}`. - string parent = 1; + // Required. The unique ID for the user in format `users/{user}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "oslogin.googleapis.com/SshPublicKey" + } + ]; - // The SSH public key and expiration time. - google.cloud.oslogin.common.SshPublicKey ssh_public_key = 2; + // Optional. The SSH public key and expiration time. + google.cloud.oslogin.common.SshPublicKey ssh_public_key = 2 [(google.api.field_behavior) = OPTIONAL]; // The project ID of the Google Cloud Platform project. string project_id = 3; @@ -154,13 +193,18 @@ message ImportSshPublicKeyResponse { // A request message for updating an SSH public key. message UpdateSshPublicKeyRequest { - // The fingerprint of the public key to update. Public keys are identified by + // Required. The fingerprint of the public key to update. Public keys are identified by // their SHA-256 fingerprint. The fingerprint of the public key is in format // `users/{user}/sshPublicKeys/{fingerprint}`. - string name = 1; - - // The SSH public key and expiration time. - google.cloud.oslogin.common.SshPublicKey ssh_public_key = 2; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "oslogin.googleapis.com/SshPublicKey" + } + ]; + + // Required. 
The SSH public key and expiration time. + google.cloud.oslogin.common.SshPublicKey ssh_public_key = 2 [(google.api.field_behavior) = REQUIRED]; // Mask to control which fields get updated. Updates all if not present. google.protobuf.FieldMask update_mask = 3; diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/common_pb2.py b/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/common_pb2.py deleted file mode 100644 index 93acf477103a..000000000000 --- a/oslogin/google/cloud/oslogin_v1/proto/oslogin/common/common_pb2.py +++ /dev/null @@ -1,355 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/oslogin/common/common.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/oslogin/common/common.proto", - package="google.cloud.oslogin.common", - syntax="proto3", - serialized_options=_b( - "\n\037com.google.cloud.oslogin.commonB\014OsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\252\002\033Google.Cloud.OsLogin.Common\312\002\033Google\\Cloud\\OsLogin\\Common" - ), - serialized_pb=_b( - '\n(google/cloud/oslogin/common/common.proto\x12\x1bgoogle.cloud.oslogin.common\x1a\x1cgoogle/api/annotations.proto"\xa8\x01\n\x0cPosixAccount\x12\x0f\n\x07primary\x18\x01 \x01(\x08\x12\x10\n\x08username\x18\x02 \x01(\t\x12\x0b\n\x03uid\x18\x03 \x01(\x03\x12\x0b\n\x03gid\x18\x04 \x01(\x03\x12\x16\n\x0ehome_directory\x18\x05 \x01(\t\x12\r\n\x05shell\x18\x06 \x01(\t\x12\r\n\x05gecos\x18\x07 
\x01(\t\x12\x11\n\tsystem_id\x18\x08 \x01(\t\x12\x12\n\naccount_id\x18\t \x01(\t"N\n\x0cSshPublicKey\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x14\x65xpiration_time_usec\x18\x02 \x01(\x03\x12\x13\n\x0b\x66ingerprint\x18\x03 \x01(\tB\xae\x01\n\x1f\x63om.google.cloud.oslogin.commonB\x0cOsLoginProtoZAgoogle.golang.org/genproto/googleapis/cloud/oslogin/common;common\xaa\x02\x1bGoogle.Cloud.OsLogin.Common\xca\x02\x1bGoogle\\Cloud\\OsLogin\\Commonb\x06proto3' - ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], -) - - -_POSIXACCOUNT = _descriptor.Descriptor( - name="PosixAccount", - full_name="google.cloud.oslogin.common.PosixAccount", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="primary", - full_name="google.cloud.oslogin.common.PosixAccount.primary", - index=0, - number=1, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="username", - full_name="google.cloud.oslogin.common.PosixAccount.username", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="uid", - full_name="google.cloud.oslogin.common.PosixAccount.uid", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gid", - full_name="google.cloud.oslogin.common.PosixAccount.gid", - index=3, - 
number=4, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="home_directory", - full_name="google.cloud.oslogin.common.PosixAccount.home_directory", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="shell", - full_name="google.cloud.oslogin.common.PosixAccount.shell", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="gecos", - full_name="google.cloud.oslogin.common.PosixAccount.gecos", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="system_id", - full_name="google.cloud.oslogin.common.PosixAccount.system_id", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="account_id", - full_name="google.cloud.oslogin.common.PosixAccount.account_id", - index=8, - number=9, - 
type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=104, - serialized_end=272, -) - - -_SSHPUBLICKEY = _descriptor.Descriptor( - name="SshPublicKey", - full_name="google.cloud.oslogin.common.SshPublicKey", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.oslogin.common.SshPublicKey.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="expiration_time_usec", - full_name="google.cloud.oslogin.common.SshPublicKey.expiration_time_usec", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fingerprint", - full_name="google.cloud.oslogin.common.SshPublicKey.fingerprint", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=274, - serialized_end=352, -) - -DESCRIPTOR.message_types_by_name["PosixAccount"] = _POSIXACCOUNT -DESCRIPTOR.message_types_by_name["SshPublicKey"] = _SSHPUBLICKEY -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -PosixAccount = _reflection.GeneratedProtocolMessageType( - "PosixAccount", - (_message.Message,), - dict( - DESCRIPTOR=_POSIXACCOUNT, - __module__="google.cloud.oslogin.common.common_pb2", - __doc__="""The POSIX account information associated with a Google account. - - - Attributes: - primary: - Only one POSIX account can be marked as primary. - username: - The username of the POSIX account. - uid: - The user ID. - gid: - The default group ID. - home_directory: - The path to the home directory for this account. - shell: - The path to the logic shell for this account. - gecos: - The GECOS (user information) entry for this account. - system_id: - System identifier for which account the username or uid - applies to. By default, the empty value is used. - account_id: - Output only. A POSIX account identifier. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.common.PosixAccount) - ), -) -_sym_db.RegisterMessage(PosixAccount) - -SshPublicKey = _reflection.GeneratedProtocolMessageType( - "SshPublicKey", - (_message.Message,), - dict( - DESCRIPTOR=_SSHPUBLICKEY, - __module__="google.cloud.oslogin.common.common_pb2", - __doc__="""The SSH public key information associated with a Google account. - - - Attributes: - key: - Public key text in SSH format, defined by RFC4253 section 6.6. - expiration_time_usec: - An expiration time in microseconds since epoch. - fingerprint: - Output only. The SHA-256 fingerprint of the SSH public key. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.common.SshPublicKey) - ), -) -_sym_db.RegisterMessage(SshPublicKey) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin_pb2.py b/oslogin/google/cloud/oslogin_v1/proto/oslogin_pb2.py index 3eb27bda6a64..0feaf4be9ae9 100644 --- a/oslogin/google/cloud/oslogin_v1/proto/oslogin_pb2.py +++ b/oslogin/google/cloud/oslogin_v1/proto/oslogin_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/oslogin_v1/proto/oslogin.proto @@ -15,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.oslogin_v1.proto import ( common_pb2 as google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2, ) @@ -30,10 +34,13 @@ "\n\033com.google.cloud.oslogin.v1B\014OsLoginProtoP\001Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\252\002\027Google.Cloud.OsLogin.V1\312\002\027Google\\Cloud\\OsLogin\\V1" ), serialized_pb=_b( - '\n+google/cloud/oslogin_v1/proto/oslogin.proto\x12\x17google.cloud.oslogin.v1\x1a\x1cgoogle/api/annotations.proto\x1a(google/cloud/oslogin/common/common.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\xa6\x02\n\x0cLoginProfile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x41\n\x0eposix_accounts\x18\x02 \x03(\x0b\x32).google.cloud.oslogin.common.PosixAccount\x12Q\n\x0fssh_public_keys\x18\x03 \x03(\x0b\x32\x38.google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry\x12\x11\n\tsuspended\x18\x04 \x01(\x08\x1a_\n\x12SshPublicKeysEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 
\x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey:\x02\x38\x01")\n\x19\x44\x65letePosixAccountRequest\x12\x0c\n\x04name\x18\x01 \x01(\t")\n\x19\x44\x65leteSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16GetLoginProfileRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16GetSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x82\x01\n\x19ImportSshPublicKeyRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x41\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey\x12\x12\n\nproject_id\x18\x03 \x01(\t"Z\n\x1aImportSshPublicKeyResponse\x12<\n\rlogin_profile\x18\x01 \x01(\x0b\x32%.google.cloud.oslogin.v1.LoginProfile"\x9d\x01\n\x19UpdateSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x41\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask2\xcd\x07\n\x0eOsLoginService\x12\x87\x01\n\x12\x44\x65letePosixAccount\x12\x32.google.cloud.oslogin.v1.DeletePosixAccountRequest\x1a\x16.google.protobuf.Empty"%\x82\xd3\xe4\x93\x02\x1f*\x1d/v1/{name=users/*/projects/*}\x12\x8c\x01\n\x12\x44\x65leteSshPublicKey\x12\x32.google.cloud.oslogin.v1.DeleteSshPublicKeyRequest\x1a\x16.google.protobuf.Empty"*\x82\xd3\xe4\x93\x02$*"/v1/{name=users/*/sshPublicKeys/*}\x12\x92\x01\n\x0fGetLoginProfile\x12/.google.cloud.oslogin.v1.GetLoginProfileRequest\x1a%.google.cloud.oslogin.v1.LoginProfile"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{name=users/*}/loginProfile\x12\x99\x01\n\x0fGetSshPublicKey\x12/.google.cloud.oslogin.v1.GetSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey"*\x82\xd3\xe4\x93\x02$\x12"/v1/{name=users/*/sshPublicKeys/*}\x12\xbe\x01\n\x12ImportSshPublicKey\x12\x32.google.cloud.oslogin.v1.ImportSshPublicKeyRequest\x1a\x33.google.cloud.oslogin.v1.ImportSshPublicKeyResponse"?\x82\xd3\xe4\x93\x02\x39"\'/v1/{parent=users/*}:importSshPublicKey:\x0essh_public_key\x12\xaf\x01\n\x12UpdateSshPublicKey\x12\x32.google.cloud.oslogin
.v1.UpdateSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey":\x82\xd3\xe4\x93\x02\x34\x32"/v1/{name=users/*/sshPublicKeys/*}:\x0essh_public_keyB\xa1\x01\n\x1b\x63om.google.cloud.oslogin.v1B\x0cOsLoginProtoP\x01Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\xaa\x02\x17Google.Cloud.OsLogin.V1\xca\x02\x17Google\\Cloud\\OsLogin\\V1b\x06proto3' + '\n+google/cloud/oslogin_v1/proto/oslogin.proto\x12\x17google.cloud.oslogin.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a(google/cloud/oslogin/common/common.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x98\x02\n\x0cLoginProfile\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\x0eposix_accounts\x18\x02 \x03(\x0b\x32).google.cloud.oslogin.common.PosixAccount\x12Q\n\x0fssh_public_keys\x18\x03 \x03(\x0b\x32\x38.google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry\x1a_\n\x12SshPublicKeysEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey:\x02\x38\x01"V\n\x19\x44\x65letePosixAccountRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#oslogin.googleapis.com/PosixAccount"V\n\x19\x44\x65leteSshPublicKeyRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#oslogin.googleapis.com/SshPublicKey"z\n\x16GetLoginProfileRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\x12#oslogin.googleapis.com/PosixAccount\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x11\n\tsystem_id\x18\x03 \x01(\t"S\n\x16GetSshPublicKeyRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#oslogin.googleapis.com/SshPublicKey"\xb4\x01\n\x19ImportSshPublicKeyRequest\x12;\n\x06parent\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\x12#oslogin.googleapis.com/SshPublicKey\x12\x46\n\x0essh_public_key\x18\x02 
\x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKeyB\x03\xe0\x41\x01\x12\x12\n\nproject_id\x18\x03 \x01(\t"Z\n\x1aImportSshPublicKeyResponse\x12<\n\rlogin_profile\x18\x01 \x01(\x0b\x32%.google.cloud.oslogin.v1.LoginProfile"\xcf\x01\n\x19UpdateSshPublicKeyRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#oslogin.googleapis.com/SshPublicKey\x12\x46\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKeyB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask2\xd0\t\n\x0eOsLoginService\x12\x8e\x01\n\x12\x44\x65letePosixAccount\x12\x32.google.cloud.oslogin.v1.DeletePosixAccountRequest\x1a\x16.google.protobuf.Empty",\x82\xd3\xe4\x93\x02\x1f*\x1d/v1/{name=users/*/projects/*}\xda\x41\x04name\x12\x93\x01\n\x12\x44\x65leteSshPublicKey\x12\x32.google.cloud.oslogin.v1.DeleteSshPublicKeyRequest\x1a\x16.google.protobuf.Empty"1\x82\xd3\xe4\x93\x02$*"/v1/{name=users/*/sshPublicKeys/*}\xda\x41\x04name\x12\x99\x01\n\x0fGetLoginProfile\x12/.google.cloud.oslogin.v1.GetLoginProfileRequest\x1a%.google.cloud.oslogin.v1.LoginProfile".\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{name=users/*}/loginProfile\xda\x41\x04name\x12\xa0\x01\n\x0fGetSshPublicKey\x12/.google.cloud.oslogin.v1.GetSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey"1\x82\xd3\xe4\x93\x02$\x12"/v1/{name=users/*/sshPublicKeys/*}\xda\x41\x04name\x12\xf9\x01\n\x12ImportSshPublicKey\x12\x32.google.cloud.oslogin.v1.ImportSshPublicKeyRequest\x1a\x33.google.cloud.oslogin.v1.ImportSshPublicKeyResponse"z\x82\xd3\xe4\x93\x02\x39"\'/v1/{parent=users/*}:importSshPublicKey:\x0essh_public_key\xda\x41\x15parent,ssh_public_key\xda\x41 
parent,ssh_public_key,project_id\x12\xe7\x01\n\x12UpdateSshPublicKey\x12\x32.google.cloud.oslogin.v1.UpdateSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey"r\x82\xd3\xe4\x93\x02\x34\x32"/v1/{name=users/*/sshPublicKeys/*}:\x0essh_public_key\xda\x41\x13name,ssh_public_key\xda\x41\x1fname,ssh_public_key,update_mask\x1ar\xca\x41\x16oslogin.googleapis.com\xd2\x41Vhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/computeB\xa1\x01\n\x1b\x63om.google.cloud.oslogin.v1B\x0cOsLoginProtoP\x01Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\xaa\x02\x17Google.Cloud.OsLogin.V1\xca\x02\x17Google\\Cloud\\OsLogin\\V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, @@ -93,8 +100,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=407, - serialized_end=502, + serialized_start=478, + serialized_end=573, ) _LOGINPROFILE = _descriptor.Descriptor( @@ -119,7 +126,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -158,24 +165,6 @@ serialized_options=None, file=DESCRIPTOR, ), - _descriptor.FieldDescriptor( - name="suspended", - full_name="google.cloud.oslogin.v1.LoginProfile.suspended", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), ], extensions=[], nested_types=[_LOGINPROFILE_SSHPUBLICKEYSENTRY], @@ -185,8 +174,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=208, - serialized_end=502, + serialized_start=293, + serialized_end=573, ) @@ -212,7 +201,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\n#oslogin.googleapis.com/PosixAccount" + ), file=DESCRIPTOR, ) ], @@ -224,8 +215,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=504, - serialized_end=545, + serialized_start=575, + serialized_end=661, ) @@ -251,7 +242,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\n#oslogin.googleapis.com/SshPublicKey" + ), file=DESCRIPTOR, ) ], @@ -263,8 +256,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=547, - serialized_end=588, + serialized_start=663, + serialized_end=749, ) @@ -290,9 +283,47 @@ containing_type=None, is_extension=False, extension_scope=None, + serialized_options=_b( + "\340A\002\372A%\022#oslogin.googleapis.com/PosixAccount" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="project_id", + full_name="google.cloud.oslogin.v1.GetLoginProfileRequest.project_id", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), + _descriptor.FieldDescriptor( + name="system_id", + full_name="google.cloud.oslogin.v1.GetLoginProfileRequest.system_id", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -302,8 +333,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=590, - serialized_end=628, + serialized_start=751, + serialized_end=873, ) @@ -329,7 +360,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\n#oslogin.googleapis.com/SshPublicKey" + ), file=DESCRIPTOR, ) ], @@ -341,8 +374,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=630, - serialized_end=668, + serialized_start=875, + serialized_end=958, ) @@ -368,7 +401,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\022#oslogin.googleapis.com/SshPublicKey" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -386,7 +421,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -416,8 +451,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=671, - serialized_end=801, + serialized_start=961, + serialized_end=1141, ) @@ -455,8 +490,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=803, - serialized_end=893, + serialized_start=1143, + serialized_end=1233, ) @@ -482,7 +517,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A%\n#oslogin.googleapis.com/SshPublicKey" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -500,7 +537,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -530,8 +567,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=896, - serialized_end=1053, + serialized_start=1236, + serialized_end=1443, ) _LOGINPROFILE_SSHPUBLICKEYSENTRY.fields_by_name[ @@ -595,15 +632,12 @@ Attributes: name: - The 
primary email address that uniquely identifies the user. + Required. A unique user ID. posix_accounts: The list of POSIX accounts associated with the user. ssh_public_keys: A map from SSH public key fingerprint to the associated key object. - suspended: - Indicates if the user is suspended. A suspended user cannot - log in but their profile information is retained. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.LoginProfile) ), @@ -622,9 +656,9 @@ Attributes: name: - A reference to the POSIX account to update. POSIX accounts are - identified by the project ID they are associated with. A - reference to the POSIX account is in format + Required. A reference to the POSIX account to update. POSIX + accounts are identified by the project ID they are associated + with. A reference to the POSIX account is in format ``users/{user}/projects/{project}``. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.DeletePosixAccountRequest) @@ -643,9 +677,9 @@ Attributes: name: - The fingerprint of the public key to update. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format + Required. The fingerprint of the public key to update. Public + keys are identified by their SHA-256 fingerprint. The + fingerprint of the public key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.DeleteSshPublicKeyRequest) @@ -665,7 +699,12 @@ Attributes: name: - The unique ID for the user in format ``users/{user}``. + Required. The unique ID for the user in format + ``users/{user}``. + project_id: + The project ID of the Google Cloud Platform project. + system_id: + A system ID for filtering the results of the request. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.GetLoginProfileRequest) ), @@ -683,9 +722,9 @@ Attributes: name: - The fingerprint of the public key to retrieve. 
Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format + Required. The fingerprint of the public key to retrieve. + Public keys are identified by their SHA-256 fingerprint. The + fingerprint of the public key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. """, # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.GetSshPublicKeyRequest) @@ -704,9 +743,10 @@ Attributes: parent: - The unique ID for the user in format ``users/{user}``. + Required. The unique ID for the user in format + ``users/{user}``. ssh_public_key: - The SSH public key and expiration time. + Optional. The SSH public key and expiration time. project_id: The project ID of the Google Cloud Platform project. """, @@ -744,12 +784,12 @@ Attributes: name: - The fingerprint of the public key to update. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format + Required. The fingerprint of the public key to update. Public + keys are identified by their SHA-256 fingerprint. The + fingerprint of the public key is in format ``users/{user}/sshPublicKeys/{fingerprint}``. ssh_public_key: - The SSH public key and expiration time. + Required. The SSH public key and expiration time. update_mask: Mask to control which fields get updated. Updates all if not present. 
@@ -762,15 +802,26 @@ DESCRIPTOR._options = None _LOGINPROFILE_SSHPUBLICKEYSENTRY._options = None +_LOGINPROFILE.fields_by_name["name"]._options = None +_DELETEPOSIXACCOUNTREQUEST.fields_by_name["name"]._options = None +_DELETESSHPUBLICKEYREQUEST.fields_by_name["name"]._options = None +_GETLOGINPROFILEREQUEST.fields_by_name["name"]._options = None +_GETSSHPUBLICKEYREQUEST.fields_by_name["name"]._options = None +_IMPORTSSHPUBLICKEYREQUEST.fields_by_name["parent"]._options = None +_IMPORTSSHPUBLICKEYREQUEST.fields_by_name["ssh_public_key"]._options = None +_UPDATESSHPUBLICKEYREQUEST.fields_by_name["name"]._options = None +_UPDATESSHPUBLICKEYREQUEST.fields_by_name["ssh_public_key"]._options = None _OSLOGINSERVICE = _descriptor.ServiceDescriptor( name="OsLoginService", full_name="google.cloud.oslogin.v1.OsLoginService", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=1056, - serialized_end=2029, + serialized_options=_b( + "\312A\026oslogin.googleapis.com\322AVhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/compute" + ), + serialized_start=1446, + serialized_end=2678, methods=[ _descriptor.MethodDescriptor( name="DeletePosixAccount", @@ -780,7 +831,7 @@ input_type=_DELETEPOSIXACCOUNTREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002\037*\035/v1/{name=users/*/projects/*}" + "\202\323\344\223\002\037*\035/v1/{name=users/*/projects/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -791,7 +842,7 @@ input_type=_DELETESSHPUBLICKEYREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002$*"/v1/{name=users/*/sshPublicKeys/*}' + '\202\323\344\223\002$*"/v1/{name=users/*/sshPublicKeys/*}\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -802,7 +853,7 @@ input_type=_GETLOGINPROFILEREQUEST, output_type=_LOGINPROFILE, serialized_options=_b( - "\202\323\344\223\002!\022\037/v1/{name=users/*}/loginProfile" + 
"\202\323\344\223\002!\022\037/v1/{name=users/*}/loginProfile\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -813,7 +864,7 @@ input_type=_GETSSHPUBLICKEYREQUEST, output_type=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY, serialized_options=_b( - '\202\323\344\223\002$\022"/v1/{name=users/*/sshPublicKeys/*}' + '\202\323\344\223\002$\022"/v1/{name=users/*/sshPublicKeys/*}\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -824,7 +875,7 @@ input_type=_IMPORTSSHPUBLICKEYREQUEST, output_type=_IMPORTSSHPUBLICKEYRESPONSE, serialized_options=_b( - "\202\323\344\223\0029\"'/v1/{parent=users/*}:importSshPublicKey:\016ssh_public_key" + "\202\323\344\223\0029\"'/v1/{parent=users/*}:importSshPublicKey:\016ssh_public_key\332A\025parent,ssh_public_key\332A parent,ssh_public_key,project_id" ), ), _descriptor.MethodDescriptor( @@ -835,7 +886,7 @@ input_type=_UPDATESSHPUBLICKEYREQUEST, output_type=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY, serialized_options=_b( - '\202\323\344\223\00242"/v1/{name=users/*/sshPublicKeys/*}:\016ssh_public_key' + '\202\323\344\223\00242"/v1/{name=users/*/sshPublicKeys/*}:\016ssh_public_key\332A\023name,ssh_public_key\332A\037name,ssh_public_key,update_mask' ), ), ], diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2.py b/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2.py deleted file mode 100644 index 11ba220ecfdc..000000000000 --- a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2.py +++ /dev/null @@ -1,848 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/oslogin_v1/proto/oslogin.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.cloud.oslogin_v1.proto import ( - common_pb2 as google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/oslogin_v1/proto/oslogin.proto", - package="google.cloud.oslogin.v1", - syntax="proto3", - serialized_options=_b( - "\n\033com.google.cloud.oslogin.v1B\014OsLoginProtoP\001Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\252\002\027Google.Cloud.OsLogin.V1\312\002\027Google\\Cloud\\OsLogin\\V1" - ), - serialized_pb=_b( - '\n+google/cloud/oslogin_v1/proto/oslogin.proto\x12\x17google.cloud.oslogin.v1\x1a\x1cgoogle/api/annotations.proto\x1a(google/cloud/oslogin/common/common.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\xa6\x02\n\x0cLoginProfile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x41\n\x0eposix_accounts\x18\x02 \x03(\x0b\x32).google.cloud.oslogin.common.PosixAccount\x12Q\n\x0fssh_public_keys\x18\x03 \x03(\x0b\x32\x38.google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry\x12\x11\n\tsuspended\x18\x04 \x01(\x08\x1a_\n\x12SshPublicKeysEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x38\n\x05value\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey:\x02\x38\x01")\n\x19\x44\x65letePosixAccountRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t")\n\x19\x44\x65leteSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16GetLoginProfileRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"&\n\x16GetSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x82\x01\n\x19ImportSshPublicKeyRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x41\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey\x12\x12\n\nproject_id\x18\x03 \x01(\t"Z\n\x1aImportSshPublicKeyResponse\x12<\n\rlogin_profile\x18\x01 \x01(\x0b\x32%.google.cloud.oslogin.v1.LoginProfile"\x9d\x01\n\x19UpdateSshPublicKeyRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x41\n\x0essh_public_key\x18\x02 \x01(\x0b\x32).google.cloud.oslogin.common.SshPublicKey\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask2\xcd\x07\n\x0eOsLoginService\x12\x87\x01\n\x12\x44\x65letePosixAccount\x12\x32.google.cloud.oslogin.v1.DeletePosixAccountRequest\x1a\x16.google.protobuf.Empty"%\x82\xd3\xe4\x93\x02\x1f*\x1d/v1/{name=users/*/projects/*}\x12\x8c\x01\n\x12\x44\x65leteSshPublicKey\x12\x32.google.cloud.oslogin.v1.DeleteSshPublicKeyRequest\x1a\x16.google.protobuf.Empty"*\x82\xd3\xe4\x93\x02$*"/v1/{name=users/*/sshPublicKeys/*}\x12\x92\x01\n\x0fGetLoginProfile\x12/.google.cloud.oslogin.v1.GetLoginProfileRequest\x1a%.google.cloud.oslogin.v1.LoginProfile"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{name=users/*}/loginProfile\x12\x99\x01\n\x0fGetSshPublicKey\x12/.google.cloud.oslogin.v1.GetSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey"*\x82\xd3\xe4\x93\x02$\x12"/v1/{name=users/*/sshPublicKeys/*}\x12\xbe\x01\n\x12ImportSshPublicKey\x12\x32.google.cloud.oslogin.v1.ImportSshPublicKeyRequest\x1a\x33.google.cloud.oslogin.v1.ImportSshPublicKeyResponse"?\x82\xd3\xe4\x93\x02\x39"\'/v1/{parent=users/*}:importSshPublicKey:\x0essh_public_key\x12\xaf\x01\n\x12UpdateSshPublicKey\x12\x32.google.cloud.oslogin.v1.UpdateSshPublicKeyRequest\x1a).google.cloud.oslogin.common.SshPublicKey":\x82\xd3\xe4\x93\x02\x34\x32"/v1/{name=users/*/sshPublicK
eys/*}:\x0essh_public_keyB\xa1\x01\n\x1b\x63om.google.cloud.oslogin.v1B\x0cOsLoginProtoP\x01Z>google.golang.org/genproto/googleapis/cloud/oslogin/v1;oslogin\xaa\x02\x17Google.Cloud.OsLogin.V1\xca\x02\x17Google\\Cloud\\OsLogin\\V1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - ], -) - - -_LOGINPROFILE_SSHPUBLICKEYSENTRY = _descriptor.Descriptor( - name="SshPublicKeysEntry", - full_name="google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry.value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=_b("8\001"), - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=407, - serialized_end=502, -) - -_LOGINPROFILE = _descriptor.Descriptor( - name="LoginProfile", - full_name="google.cloud.oslogin.v1.LoginProfile", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - 
full_name="google.cloud.oslogin.v1.LoginProfile.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="posix_accounts", - full_name="google.cloud.oslogin.v1.LoginProfile.posix_accounts", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ssh_public_keys", - full_name="google.cloud.oslogin.v1.LoginProfile.ssh_public_keys", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="suspended", - full_name="google.cloud.oslogin.v1.LoginProfile.suspended", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_LOGINPROFILE_SSHPUBLICKEYSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=208, - serialized_end=502, -) - - -_DELETEPOSIXACCOUNTREQUEST = _descriptor.Descriptor( - name="DeletePosixAccountRequest", - full_name="google.cloud.oslogin.v1.DeletePosixAccountRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.DeletePosixAccountRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=504, - serialized_end=545, -) - - -_DELETESSHPUBLICKEYREQUEST = _descriptor.Descriptor( - name="DeleteSshPublicKeyRequest", - full_name="google.cloud.oslogin.v1.DeleteSshPublicKeyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.DeleteSshPublicKeyRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=547, - serialized_end=588, -) - - -_GETLOGINPROFILEREQUEST = _descriptor.Descriptor( - name="GetLoginProfileRequest", - full_name="google.cloud.oslogin.v1.GetLoginProfileRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.GetLoginProfileRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=590, - serialized_end=628, -) - - -_GETSSHPUBLICKEYREQUEST = _descriptor.Descriptor( - name="GetSshPublicKeyRequest", - full_name="google.cloud.oslogin.v1.GetSshPublicKeyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.GetSshPublicKeyRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=630, - serialized_end=668, -) - - -_IMPORTSSHPUBLICKEYREQUEST = _descriptor.Descriptor( - name="ImportSshPublicKeyRequest", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ssh_public_key", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyRequest.ssh_public_key", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyRequest.project_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=671, - serialized_end=801, -) - - -_IMPORTSSHPUBLICKEYRESPONSE = _descriptor.Descriptor( - name="ImportSshPublicKeyResponse", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="login_profile", - full_name="google.cloud.oslogin.v1.ImportSshPublicKeyResponse.login_profile", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=803, - serialized_end=893, -) - - -_UPDATESSHPUBLICKEYREQUEST = _descriptor.Descriptor( - name="UpdateSshPublicKeyRequest", - full_name="google.cloud.oslogin.v1.UpdateSshPublicKeyRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.cloud.oslogin.v1.UpdateSshPublicKeyRequest.name", - index=0, - number=1, 
- type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="ssh_public_key", - full_name="google.cloud.oslogin.v1.UpdateSshPublicKeyRequest.ssh_public_key", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.cloud.oslogin.v1.UpdateSshPublicKeyRequest.update_mask", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=896, - serialized_end=1053, -) - -_LOGINPROFILE_SSHPUBLICKEYSENTRY.fields_by_name[ - "value" -].message_type = google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY -_LOGINPROFILE_SSHPUBLICKEYSENTRY.containing_type = _LOGINPROFILE -_LOGINPROFILE.fields_by_name[ - "posix_accounts" -].message_type = google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._POSIXACCOUNT -_LOGINPROFILE.fields_by_name[ - "ssh_public_keys" -].message_type = _LOGINPROFILE_SSHPUBLICKEYSENTRY -_IMPORTSSHPUBLICKEYREQUEST.fields_by_name[ - "ssh_public_key" -].message_type = google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY -_IMPORTSSHPUBLICKEYRESPONSE.fields_by_name["login_profile"].message_type = _LOGINPROFILE 
-_UPDATESSHPUBLICKEYREQUEST.fields_by_name[ - "ssh_public_key" -].message_type = google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY -_UPDATESSHPUBLICKEYREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -DESCRIPTOR.message_types_by_name["LoginProfile"] = _LOGINPROFILE -DESCRIPTOR.message_types_by_name[ - "DeletePosixAccountRequest" -] = _DELETEPOSIXACCOUNTREQUEST -DESCRIPTOR.message_types_by_name[ - "DeleteSshPublicKeyRequest" -] = _DELETESSHPUBLICKEYREQUEST -DESCRIPTOR.message_types_by_name["GetLoginProfileRequest"] = _GETLOGINPROFILEREQUEST -DESCRIPTOR.message_types_by_name["GetSshPublicKeyRequest"] = _GETSSHPUBLICKEYREQUEST -DESCRIPTOR.message_types_by_name[ - "ImportSshPublicKeyRequest" -] = _IMPORTSSHPUBLICKEYREQUEST -DESCRIPTOR.message_types_by_name[ - "ImportSshPublicKeyResponse" -] = _IMPORTSSHPUBLICKEYRESPONSE -DESCRIPTOR.message_types_by_name[ - "UpdateSshPublicKeyRequest" -] = _UPDATESSHPUBLICKEYREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LoginProfile = _reflection.GeneratedProtocolMessageType( - "LoginProfile", - (_message.Message,), - dict( - SshPublicKeysEntry=_reflection.GeneratedProtocolMessageType( - "SshPublicKeysEntry", - (_message.Message,), - dict( - DESCRIPTOR=_LOGINPROFILE_SSHPUBLICKEYSENTRY, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2" - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.LoginProfile.SshPublicKeysEntry) - ), - ), - DESCRIPTOR=_LOGINPROFILE, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""The user profile information used for logging in to a virtual machine on - Google Compute Engine. - - - Attributes: - name: - The primary email address that uniquely identifies the user. - posix_accounts: - The list of POSIX accounts associated with the user. - ssh_public_keys: - A map from SSH public key fingerprint to the associated key - object. - suspended: - Indicates if the user is suspended. 
A suspended user cannot - log in but their profile information is retained. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.LoginProfile) - ), -) -_sym_db.RegisterMessage(LoginProfile) -_sym_db.RegisterMessage(LoginProfile.SshPublicKeysEntry) - -DeletePosixAccountRequest = _reflection.GeneratedProtocolMessageType( - "DeletePosixAccountRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETEPOSIXACCOUNTREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for deleting a POSIX account entry. - - - Attributes: - name: - A reference to the POSIX account to update. POSIX accounts are - identified by the project ID they are associated with. A - reference to the POSIX account is in format - ``users/{user}/projects/{project}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.DeletePosixAccountRequest) - ), -) -_sym_db.RegisterMessage(DeletePosixAccountRequest) - -DeleteSshPublicKeyRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSshPublicKeyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_DELETESSHPUBLICKEYREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for deleting an SSH public key. - - - Attributes: - name: - The fingerprint of the public key to update. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format - ``users/{user}/sshPublicKeys/{fingerprint}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.DeleteSshPublicKeyRequest) - ), -) -_sym_db.RegisterMessage(DeleteSshPublicKeyRequest) - -GetLoginProfileRequest = _reflection.GeneratedProtocolMessageType( - "GetLoginProfileRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETLOGINPROFILEREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for retrieving the login profile information for a - user. 
- - - Attributes: - name: - The unique ID for the user in format ``users/{user}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.GetLoginProfileRequest) - ), -) -_sym_db.RegisterMessage(GetLoginProfileRequest) - -GetSshPublicKeyRequest = _reflection.GeneratedProtocolMessageType( - "GetSshPublicKeyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_GETSSHPUBLICKEYREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for retrieving an SSH public key. - - - Attributes: - name: - The fingerprint of the public key to retrieve. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format - ``users/{user}/sshPublicKeys/{fingerprint}``. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.GetSshPublicKeyRequest) - ), -) -_sym_db.RegisterMessage(GetSshPublicKeyRequest) - -ImportSshPublicKeyRequest = _reflection.GeneratedProtocolMessageType( - "ImportSshPublicKeyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTSSHPUBLICKEYREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for importing an SSH public key. - - - Attributes: - parent: - The unique ID for the user in format ``users/{user}``. - ssh_public_key: - The SSH public key and expiration time. - project_id: - The project ID of the Google Cloud Platform project. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.ImportSshPublicKeyRequest) - ), -) -_sym_db.RegisterMessage(ImportSshPublicKeyRequest) - -ImportSshPublicKeyResponse = _reflection.GeneratedProtocolMessageType( - "ImportSshPublicKeyResponse", - (_message.Message,), - dict( - DESCRIPTOR=_IMPORTSSHPUBLICKEYRESPONSE, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A response message for importing an SSH public key. - - - Attributes: - login_profile: - The login profile information for the user. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.ImportSshPublicKeyResponse) - ), -) -_sym_db.RegisterMessage(ImportSshPublicKeyResponse) - -UpdateSshPublicKeyRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSshPublicKeyRequest", - (_message.Message,), - dict( - DESCRIPTOR=_UPDATESSHPUBLICKEYREQUEST, - __module__="google.cloud.oslogin_v1.proto.oslogin_pb2", - __doc__="""A request message for updating an SSH public key. - - - Attributes: - name: - The fingerprint of the public key to update. Public keys are - identified by their SHA-256 fingerprint. The fingerprint of - the public key is in format - ``users/{user}/sshPublicKeys/{fingerprint}``. - ssh_public_key: - The SSH public key and expiration time. - update_mask: - Mask to control which fields get updated. Updates all if not - present. - """, - # @@protoc_insertion_point(class_scope:google.cloud.oslogin.v1.UpdateSshPublicKeyRequest) - ), -) -_sym_db.RegisterMessage(UpdateSshPublicKeyRequest) - - -DESCRIPTOR._options = None -_LOGINPROFILE_SSHPUBLICKEYSENTRY._options = None - -_OSLOGINSERVICE = _descriptor.ServiceDescriptor( - name="OsLoginService", - full_name="google.cloud.oslogin.v1.OsLoginService", - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=1056, - serialized_end=2029, - methods=[ - _descriptor.MethodDescriptor( - name="DeletePosixAccount", - full_name="google.cloud.oslogin.v1.OsLoginService.DeletePosixAccount", - index=0, - containing_service=None, - input_type=_DELETEPOSIXACCOUNTREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002\037*\035/v1/{name=users/*/projects/*}" - ), - ), - _descriptor.MethodDescriptor( - name="DeleteSshPublicKey", - full_name="google.cloud.oslogin.v1.OsLoginService.DeleteSshPublicKey", - index=1, - containing_service=None, - input_type=_DELETESSHPUBLICKEYREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - 
'\202\323\344\223\002$*"/v1/{name=users/*/sshPublicKeys/*}' - ), - ), - _descriptor.MethodDescriptor( - name="GetLoginProfile", - full_name="google.cloud.oslogin.v1.OsLoginService.GetLoginProfile", - index=2, - containing_service=None, - input_type=_GETLOGINPROFILEREQUEST, - output_type=_LOGINPROFILE, - serialized_options=_b( - "\202\323\344\223\002!\022\037/v1/{name=users/*}/loginProfile" - ), - ), - _descriptor.MethodDescriptor( - name="GetSshPublicKey", - full_name="google.cloud.oslogin.v1.OsLoginService.GetSshPublicKey", - index=3, - containing_service=None, - input_type=_GETSSHPUBLICKEYREQUEST, - output_type=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY, - serialized_options=_b( - '\202\323\344\223\002$\022"/v1/{name=users/*/sshPublicKeys/*}' - ), - ), - _descriptor.MethodDescriptor( - name="ImportSshPublicKey", - full_name="google.cloud.oslogin.v1.OsLoginService.ImportSshPublicKey", - index=4, - containing_service=None, - input_type=_IMPORTSSHPUBLICKEYREQUEST, - output_type=_IMPORTSSHPUBLICKEYRESPONSE, - serialized_options=_b( - "\202\323\344\223\0029\"'/v1/{parent=users/*}:importSshPublicKey:\016ssh_public_key" - ), - ), - _descriptor.MethodDescriptor( - name="UpdateSshPublicKey", - full_name="google.cloud.oslogin.v1.OsLoginService.UpdateSshPublicKey", - index=5, - containing_service=None, - input_type=_UPDATESSHPUBLICKEYREQUEST, - output_type=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2._SSHPUBLICKEY, - serialized_options=_b( - '\202\323\344\223\00242"/v1/{name=users/*/sshPublicKeys/*}:\016ssh_public_key' - ), - ), - ], -) -_sym_db.RegisterServiceDescriptor(_OSLOGINSERVICE) - -DESCRIPTOR.services_by_name["OsLoginService"] = _OSLOGINSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2_grpc.py b/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2_grpc.py deleted file mode 100644 index 6b002666a67b..000000000000 --- 
a/oslogin/google/cloud/oslogin_v1/proto/oslogin_v1/proto/oslogin_pb2_grpc.py +++ /dev/null @@ -1,148 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.oslogin_v1.proto import ( - common_pb2 as google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2, -) -from google.cloud.oslogin_v1.proto import ( - oslogin_pb2 as google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class OsLoginServiceStub(object): - """Cloud OS Login API - - The Cloud OS Login API allows you to manage users and their associated SSH - public keys for logging into virtual machines on Google Cloud Platform. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.DeletePosixAccount = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/DeletePosixAccount", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.DeletePosixAccountRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.DeleteSshPublicKey = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/DeleteSshPublicKey", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.DeleteSshPublicKeyRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetLoginProfile = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/GetLoginProfile", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.GetLoginProfileRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.LoginProfile.FromString, - ) - self.GetSshPublicKey = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/GetSshPublicKey", - 
request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.GetSshPublicKeyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.SshPublicKey.FromString, - ) - self.ImportSshPublicKey = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/ImportSshPublicKey", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.ImportSshPublicKeyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.ImportSshPublicKeyResponse.FromString, - ) - self.UpdateSshPublicKey = channel.unary_unary( - "/google.cloud.oslogin.v1.OsLoginService/UpdateSshPublicKey", - request_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.UpdateSshPublicKeyRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.SshPublicKey.FromString, - ) - - -class OsLoginServiceServicer(object): - """Cloud OS Login API - - The Cloud OS Login API allows you to manage users and their associated SSH - public keys for logging into virtual machines on Google Cloud Platform. - """ - - def DeletePosixAccount(self, request, context): - """Deletes a POSIX account. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSshPublicKey(self, request, context): - """Deletes an SSH public key. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetLoginProfile(self, request, context): - """Retrieves the profile information used for logging in to a virtual machine - on Google Compute Engine. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSshPublicKey(self, request, context): - """Retrieves an SSH public key. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ImportSshPublicKey(self, request, context): - """Adds an SSH public key and returns the profile information. Default POSIX - account information is set when no username and UID exist as part of the - login profile. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSshPublicKey(self, request, context): - """Updates an SSH public key and returns the profile information. This method - supports patch semantics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_OsLoginServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "DeletePosixAccount": grpc.unary_unary_rpc_method_handler( - servicer.DeletePosixAccount, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.DeletePosixAccountRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "DeleteSshPublicKey": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSshPublicKey, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.DeleteSshPublicKeyRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetLoginProfile": grpc.unary_unary_rpc_method_handler( - servicer.GetLoginProfile, - 
request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.GetLoginProfileRequest.FromString, - response_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.LoginProfile.SerializeToString, - ), - "GetSshPublicKey": grpc.unary_unary_rpc_method_handler( - servicer.GetSshPublicKey, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.GetSshPublicKeyRequest.FromString, - response_serializer=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.SshPublicKey.SerializeToString, - ), - "ImportSshPublicKey": grpc.unary_unary_rpc_method_handler( - servicer.ImportSshPublicKey, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.ImportSshPublicKeyRequest.FromString, - response_serializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.ImportSshPublicKeyResponse.SerializeToString, - ), - "UpdateSshPublicKey": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSshPublicKey, - request_deserializer=google_dot_cloud_dot_oslogin__v1_dot_proto_dot_oslogin__pb2.UpdateSshPublicKeyRequest.FromString, - response_serializer=google_dot_cloud_dot_oslogin_dot_common_dot_common__pb2.SshPublicKey.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.oslogin.v1.OsLoginService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/oslogin/synth.metadata b/oslogin/synth.metadata index 56d7ccd28794..cb189d0cc1d0 100644 --- a/oslogin/synth.metadata +++ b/oslogin/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:33:01.454928Z", + "updateTime": "2019-10-25T21:10:32.534388Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.2", + "dockerImage": "googleapis/artman@sha256:3b8f7d9b4c206843ce08053474f5c64ae4d388ff7d995e68b59fb65edf73eeb9" } 
}, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "d27a44798506d28e8e6d874bd128da43f45f74c4", + "internalRef": "276716410" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/oslogin/synth.py b/oslogin/synth.py index 7e76e18b1c4e..4bcb94cadb59 100644 --- a/oslogin/synth.py +++ b/oslogin/synth.py @@ -29,9 +29,20 @@ artman_output_name="os-login-v1", include_protos=True, ) +# pb2's are incorrectly generated into deeper directories, so copy separately into proto/ +s.move( + library, + excludes=[ + "nox.py", + "setup.py", + "README.rst", + "docs/index.rst", + "**/proto/oslogin/**", + "**/proto/oslogin_v1/**", + ], +) +s.move(library / "google/cloud/oslogin_v1/proto/**/*", "google/cloud/oslogin_v1/proto") -s.move(library / "google/cloud/oslogin_v1") -s.move(library / "tests/unit/gapic/v1") # Fix up imports s.replace( diff --git a/oslogin/tests/unit/gapic/v1/test_os_login_service_client_v1.py b/oslogin/tests/unit/gapic/v1/test_os_login_service_client_v1.py index 18506c0d6859..a912a5780d3a 100644 --- a/oslogin/tests/unit/gapic/v1/test_os_login_service_client_v1.py +++ b/oslogin/tests/unit/gapic/v1/test_os_login_service_client_v1.py @@ -70,7 +70,7 @@ def test_delete_posix_account(self): client = oslogin_v1.OsLoginServiceClient() # Setup Request - name = client.project_path("[USER]", "[PROJECT]") + name = client.posix_account_path("[USER]", "[PROJECT]") client.delete_posix_account(name) @@ -88,7 +88,7 @@ def test_delete_posix_account_exception(self): client = oslogin_v1.OsLoginServiceClient() # Setup request - name = client.project_path("[USER]", "[PROJECT]") + name = client.posix_account_path("[USER]", "[PROJECT]") with pytest.raises(CustomException): client.delete_posix_account(name) @@ -101,7 +101,7 @@ def test_delete_ssh_public_key(self): client = 
oslogin_v1.OsLoginServiceClient() # Setup Request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") client.delete_ssh_public_key(name) @@ -119,7 +119,7 @@ def test_delete_ssh_public_key_exception(self): client = oslogin_v1.OsLoginServiceClient() # Setup request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") with pytest.raises(CustomException): client.delete_ssh_public_key(name) @@ -127,8 +127,7 @@ def test_delete_ssh_public_key_exception(self): def test_get_login_profile(self): # Setup Expected Response name_2 = "name2-1052831874" - suspended = False - expected_response = {"name": name_2, "suspended": suspended} + expected_response = {"name": name_2} expected_response = oslogin_pb2.LoginProfile(**expected_response) # Mock the API response @@ -168,10 +167,12 @@ def test_get_ssh_public_key(self): key = "key106079" expiration_time_usec = 2058878882 fingerprint = "fingerprint-1375934236" + name_2 = "name2-1052831874" expected_response = { "key": key, "expiration_time_usec": expiration_time_usec, "fingerprint": fingerprint, + "name": name_2, } expected_response = common_pb2.SshPublicKey(**expected_response) @@ -183,7 +184,7 @@ def test_get_ssh_public_key(self): client = oslogin_v1.OsLoginServiceClient() # Setup Request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") response = client.get_ssh_public_key(name) assert expected_response == response @@ -202,7 +203,7 @@ def test_get_ssh_public_key_exception(self): client = oslogin_v1.OsLoginServiceClient() # Setup request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") with pytest.raises(CustomException): client.get_ssh_public_key(name) @@ -221,15 +222,12 @@ def test_import_ssh_public_key(self): # Setup Request parent = 
client.user_path("[USER]") - ssh_public_key = {} - response = client.import_ssh_public_key(parent, ssh_public_key) + response = client.import_ssh_public_key(parent) assert expected_response == response assert len(channel.requests) == 1 - expected_request = oslogin_pb2.ImportSshPublicKeyRequest( - parent=parent, ssh_public_key=ssh_public_key - ) + expected_request = oslogin_pb2.ImportSshPublicKeyRequest(parent=parent) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -243,20 +241,21 @@ def test_import_ssh_public_key_exception(self): # Setup request parent = client.user_path("[USER]") - ssh_public_key = {} with pytest.raises(CustomException): - client.import_ssh_public_key(parent, ssh_public_key) + client.import_ssh_public_key(parent) def test_update_ssh_public_key(self): # Setup Expected Response key = "key106079" expiration_time_usec = 2058878882 fingerprint = "fingerprint-1375934236" + name_2 = "name2-1052831874" expected_response = { "key": key, "expiration_time_usec": expiration_time_usec, "fingerprint": fingerprint, + "name": name_2, } expected_response = common_pb2.SshPublicKey(**expected_response) @@ -268,7 +267,7 @@ def test_update_ssh_public_key(self): client = oslogin_v1.OsLoginServiceClient() # Setup Request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") ssh_public_key = {} response = client.update_ssh_public_key(name, ssh_public_key) @@ -290,7 +289,7 @@ def test_update_ssh_public_key_exception(self): client = oslogin_v1.OsLoginServiceClient() # Setup request - name = client.fingerprint_path("[USER]", "[FINGERPRINT]") + name = client.ssh_public_key_path("[USER]", "[FINGERPRINT]") ssh_public_key = {} with pytest.raises(CustomException): diff --git a/phishingprotection/CHANGELOG.md b/phishingprotection/CHANGELOG.md index 5054a4350d8f..d26be3227078 100644 --- a/phishingprotection/CHANGELOG.md +++ b/phishingprotection/CHANGELOG.md @@ -4,6 +4,34 @@ 
[1]: https://pypi.org/project/google-cloud-phishing-protection/#history + +## 0.2.0 + +10-10-2019 15:30 PDT + +### Implementation Changes +- Use correct release status. ([#9451](https://github.com/googleapis/google-cloud-python/pull/9451)) +- Remove send / receive message size limit (via synth). ([#8963](https://github.com/googleapis/google-cloud-python/pull/8963)) +- Add `client_options` support, re-template / blacken files. ([#8539](https://github.com/googleapis/google-cloud-python/pull/8539)) +- Fix dist name used to compute `gapic_version`. ([#8100](https://github.com/googleapis/google-cloud-python/pull/8100)) +- Remove retries for `DEADLINE_EXCEEDED` (via synth). ([#7889](https://github.com/googleapis/google-cloud-python/pull/7889)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Change requests intersphinx url (via synth). ([#9407](https://github.com/googleapis/google-cloud-python/pull/9407)) +- Update docstrings (via synth). ([#9350](https://github.com/googleapis/google-cloud-python/pull/9350)) +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Normalize docs. ([#8994](https://github.com/googleapis/google-cloud-python/pull/8994)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add docs job to publish to googleapis.dev. ([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) + +### Internal / Testing Changes +- Pin black version (via synth). 
([#8590](https://github.com/googleapis/google-cloud-python/pull/8590)) + ## 0.1.0 04-30-2019 15:03 PDT diff --git a/phishingprotection/docs/conf.py b/phishingprotection/docs/conf.py index c8631ce1d467..7ecb748d61e6 100644 --- a/phishingprotection/docs/conf.py +++ b/phishingprotection/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py b/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py index add1fc161b35..e0b48c8f8195 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/phishing_protection_service_client.py @@ -206,9 +206,10 @@ def report_phishing( ): """ Reports a URI suspected of containing phishing content to be reviewed. - Once the report review is completed, if its result verifies the - existince of malicious phishing content, the site will be added the to - `Google's Social Engineering + Once the report review is complete, its result can be found in the Cloud + Security Command Center findings dashboard for Phishing Protection. If + the result verifies the existence of malicious phishing content, the + site will be added the to `Google's Social Engineering lists `__ in order to protect users that could get exposed to this threat in the future. @@ -228,7 +229,7 @@ def report_phishing( Args: parent (str): Required. 
The name of the project for which the report will be created, in the format "projects/{project\_number}". - uri (str): The URI that is being reported for phishing content to be analyzed. + uri (str): Required. The URI that is being reported for phishing content to be analyzed. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py b/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py index 8498f361b1ac..59150898b6ed 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/gapic/transports/phishing_protection_service_grpc_transport.py @@ -117,9 +117,10 @@ def report_phishing(self): """Return the gRPC stub for :meth:`PhishingProtectionServiceClient.report_phishing`. Reports a URI suspected of containing phishing content to be reviewed. - Once the report review is completed, if its result verifies the - existince of malicious phishing content, the site will be added the to - `Google's Social Engineering + Once the report review is complete, its result can be found in the Cloud + Security Command Center findings dashboard for Phishing Protection. If + the result verifies the existence of malicious phishing content, the + site will be added the to `Google's Social Engineering lists `__ in order to protect users that could get exposed to this threat in the future. 
diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto index 62a9fd03e3df..ce0c3624d818 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.phishingprotection.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; option csharp_namespace = "Google.Cloud.PhishingProtection.V1Beta1"; option go_package = "google.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1;phishingprotection"; @@ -29,17 +32,22 @@ option php_namespace = "Google\\Cloud\\PhishingProtection\\V1beta1"; // Service to report phishing URIs. service PhishingProtectionServiceV1Beta1 { + option (google.api.default_host) = "phishingprotection.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Reports a URI suspected of containing phishing content to be reviewed. Once - // the report review is completed, if its result verifies the existince of - // malicious phishing content, the site will be added the to [Google's Social - // Engineering lists](https://support.google.com/webmasters/answer/6350487/) - // in order to protect users that could get exposed to this threat in - // the future. + // the report review is complete, its result can be found in the Cloud + // Security Command Center findings dashboard for Phishing Protection. 
If the + // result verifies the existence of malicious phishing content, the site will + // be added the to [Google's Social Engineering + // lists](https://support.google.com/webmasters/answer/6350487/) in order to + // protect users that could get exposed to this threat in the future. rpc ReportPhishing(ReportPhishingRequest) returns (ReportPhishingResponse) { option (google.api.http) = { post: "/v1beta1/{parent=projects/*}/phishing:report" body: "*" }; + option (google.api.method_signature) = "parent,uri"; } } @@ -47,11 +55,18 @@ service PhishingProtectionServiceV1Beta1 { message ReportPhishingRequest { // Required. The name of the project for which the report will be created, // in the format "projects/{project_number}". - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; - // The URI that is being reported for phishing content to be analyzed. - string uri = 2; + // Required. The URI that is being reported for phishing content to be analyzed. + string uri = 2 [(google.api.field_behavior) = REQUIRED]; } // The ReportPhishing (empty) response message. 
-message ReportPhishingResponse {} +message ReportPhishingResponse { + +} diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py index 91b33898f9cf..72631da0878f 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -26,9 +29,14 @@ "\n%com.google.phishingprotection.v1beta1B\027PhishingProtectionProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1;phishingprotection\242\002\004GCPP\252\002'Google.Cloud.PhishingProtection.V1Beta1\312\002'Google\\Cloud\\PhishingProtection\\V1beta1" ), serialized_pb=_b( - '\nFgoogle/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto\x12\'google.cloud.phishingprotection.v1beta1\x1a\x1cgoogle/api/annotations.proto"4\n\x15ReportPhishingRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0b\n\x03uri\x18\x02 \x01(\t"\x18\n\x16ReportPhishingResponse2\xef\x01\n 
PhishingProtectionServiceV1Beta1\x12\xca\x01\n\x0eReportPhishing\x12>.google.cloud.phishingprotection.v1beta1.ReportPhishingRequest\x1a?.google.cloud.phishingprotection.v1beta1.ReportPhishingResponse"7\x82\xd3\xe4\x93\x02\x31",/v1beta1/{parent=projects/*}/phishing:report:\x01*B\xf8\x01\n%com.google.phishingprotection.v1beta1B\x17PhishingProtectionProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1;phishingprotection\xa2\x02\x04GCPP\xaa\x02\'Google.Cloud.PhishingProtection.V1Beta1\xca\x02\'Google\\Cloud\\PhishingProtection\\V1beta1b\x06proto3' + '\nFgoogle/cloud/phishingprotection_v1beta1/proto/phishingprotection.proto\x12\'google.cloud.phishingprotection.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto"n\n\x15ReportPhishingRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x10\n\x03uri\x18\x02 \x01(\tB\x03\xe0\x41\x02"\x18\n\x16ReportPhishingResponse2\xd3\x02\n PhishingProtectionServiceV1Beta1\x12\xd7\x01\n\x0eReportPhishing\x12>.google.cloud.phishingprotection.v1beta1.ReportPhishingRequest\x1a?.google.cloud.phishingprotection.v1beta1.ReportPhishingResponse"D\x82\xd3\xe4\x93\x02\x31",/v1beta1/{parent=projects/*}/phishing:report:\x01*\xda\x41\nparent,uri\x1aU\xca\x41!phishingprotection.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xf8\x01\n%com.google.phishingprotection.v1beta1B\x17PhishingProtectionProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/phishingprotection/v1beta1;phishingprotection\xa2\x02\x04GCPP\xaa\x02\'Google.Cloud.PhishingProtection.V1Beta1\xca\x02\'Google\\Cloud\\PhishingProtection\\V1beta1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + 
google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + ], ) @@ -54,7 +62,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -72,7 +82,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -84,8 +94,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=145, - serialized_end=197, + serialized_start=230, + serialized_end=340, ) @@ -104,8 +114,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=199, - serialized_end=223, + serialized_start=342, + serialized_end=366, ) DESCRIPTOR.message_types_by_name["ReportPhishingRequest"] = _REPORTPHISHINGREQUEST @@ -126,8 +136,8 @@ Required. The name of the project for which the report will be created, in the format "projects/{project\_number}". uri: - The URI that is being reported for phishing content to be - analyzed. + Required. The URI that is being reported for phishing content + to be analyzed. 
""", # @@protoc_insertion_point(class_scope:google.cloud.phishingprotection.v1beta1.ReportPhishingRequest) ), @@ -149,15 +159,19 @@ DESCRIPTOR._options = None +_REPORTPHISHINGREQUEST.fields_by_name["parent"]._options = None +_REPORTPHISHINGREQUEST.fields_by_name["uri"]._options = None _PHISHINGPROTECTIONSERVICEV1BETA1 = _descriptor.ServiceDescriptor( name="PhishingProtectionServiceV1Beta1", full_name="google.cloud.phishingprotection.v1beta1.PhishingProtectionServiceV1Beta1", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=226, - serialized_end=465, + serialized_options=_b( + "\312A!phishingprotection.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=369, + serialized_end=708, methods=[ _descriptor.MethodDescriptor( name="ReportPhishing", @@ -167,7 +181,7 @@ input_type=_REPORTPHISHINGREQUEST, output_type=_REPORTPHISHINGRESPONSE, serialized_options=_b( - '\202\323\344\223\0021",/v1beta1/{parent=projects/*}/phishing:report:\001*' + '\202\323\344\223\0021",/v1beta1/{parent=projects/*}/phishing:report:\001*\332A\nparent,uri' ), ) ], diff --git a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py index 30c8d4c4b948..26465d3729c2 100644 --- a/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py +++ b/phishingprotection/google/cloud/phishingprotection_v1beta1/proto/phishingprotection_pb2_grpc.py @@ -29,11 +29,12 @@ class PhishingProtectionServiceV1Beta1Servicer(object): def ReportPhishing(self, request, context): """Reports a URI suspected of containing phishing content to be reviewed. 
Once - the report review is completed, if its result verifies the existince of - malicious phishing content, the site will be added the to [Google's Social - Engineering lists](https://support.google.com/webmasters/answer/6350487/) - in order to protect users that could get exposed to this threat in - the future. + the report review is complete, its result can be found in the Cloud + Security Command Center findings dashboard for Phishing Protection. If the + result verifies the existence of malicious phishing content, the site will + be added the to [Google's Social Engineering + lists](https://support.google.com/webmasters/answer/6350487/) in order to + protect users that could get exposed to this threat in the future. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/phishingprotection/setup.py b/phishingprotection/setup.py index d837cfe294af..711d727c0470 100644 --- a/phishingprotection/setup.py +++ b/phishingprotection/setup.py @@ -21,8 +21,8 @@ name = "google-cloud-phishing-protection" description = "Phishing Protection API API client library" -version = "0.1.0" -release_status = "3 - Alpha" +version = "0.2.0" +release_status = "Development Status :: 3 - Alpha" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", 'enum34; python_version < "3.4"', diff --git a/phishingprotection/synth.metadata b/phishingprotection/synth.metadata index 0ddff81eb85a..9d709c78872a 100644 --- a/phishingprotection/synth.metadata +++ b/phishingprotection/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:33:35.867649Z", + "updateTime": "2019-10-05T12:30:18.605559Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": 
"googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" } }, { diff --git a/pubsub/CHANGELOG.md b/pubsub/CHANGELOG.md index 347266bd85dc..09716f05a9cf 100644 --- a/pubsub/CHANGELOG.md +++ b/pubsub/CHANGELOG.md @@ -4,6 +4,30 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 1.0.2 + +09-30-2019 11:57 PDT + + +### Implementation Changes + +- Streaming pull shouldn't need `subscriptions.get` permission ([#9360](https://github.com/googleapis/google-cloud-python/pull/9360)). + +## 1.0.1 + +09-27-2019 07:01 PDT + + +### Implementation Changes +- Set default stream ACK deadline to subscriptions'. ([#9268](https://github.com/googleapis/google-cloud-python/pull/9268)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Link to correct TimeoutError in futures docs. ([#9216](https://github.com/googleapis/google-cloud-python/pull/9216)) + +### Internal / Testing Changes +- Adjust messaging RPC timeout settings (via synth). 
[#9279](https://github.com/googleapis/google-cloud-python/pull/9279) + ## 1.0.0 08-29-2019 09:27 PDT diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index af6883fd067e..d3b1d6f51eb6 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -44,12 +44,20 @@ exceptions.GatewayTimeout, exceptions.Aborted, ) +_TERMINATING_STREAM_ERRORS = (exceptions.Cancelled,) _MAX_LOAD = 1.0 """The load threshold above which to pause the incoming message stream.""" _RESUME_THRESHOLD = 0.8 """The load threshold below which to resume the incoming message stream.""" +_DEFAULT_STREAM_ACK_DEADLINE = 60 +"""The default message acknowledge deadline in seconds for incoming message stream. + +This default deadline is dynamically modified for the messages that are added +to the lease management. +""" + def _maybe_wrap_exception(exception): """Wraps a gRPC exception class, if needed.""" @@ -208,7 +216,7 @@ def load(self): float: The load value. """ if self._leaser is None: - return 0 + return 0.0 return max( [ @@ -384,14 +392,36 @@ def open(self, callback, on_callback_error): ) # Create the RPC + + # We must use a fixed value for the ACK deadline, as we cannot read it + # from the subscription. The latter would require `pubsub.subscriptions.get` + # permission, which is not granted to the default subscriber role + # `roles/pubsub.subscriber`. + # See also https://github.com/googleapis/google-cloud-python/issues/9339 + # + # When dynamic lease management is enabled for the "on hold" messages, + # the default stream ACK deadline should again be set based on the + # historic ACK timing data, i.e. `self.ack_histogram.percentile(99)`. 
+ stream_ack_deadline_seconds = _DEFAULT_STREAM_ACK_DEADLINE + + get_initial_request = functools.partial( + self._get_initial_request, stream_ack_deadline_seconds + ) self._rpc = bidi.ResumableBidiRpc( start_rpc=self._client.api.streaming_pull, - initial_request=self._get_initial_request, + initial_request=get_initial_request, should_recover=self._should_recover, + should_terminate=self._should_terminate, throttle_reopen=True, ) self._rpc.add_done_callback(self._on_rpc_done) + _LOGGER.debug( + "Creating a stream, default ACK deadline set to {} seconds.".format( + stream_ack_deadline_seconds + ) + ) + # Create references to threads self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) @@ -462,12 +492,16 @@ def close(self, reason=None): for callback in self._close_callbacks: callback(self, reason) - def _get_initial_request(self): + def _get_initial_request(self, stream_ack_deadline_seconds): """Return the initial request for the RPC. This defines the initial request that must always be sent to Pub/Sub immediately upon opening the subscription. + Args: + stream_ack_deadline_seconds (int): + The default message acknowledge deadline for the stream. + Returns: google.cloud.pubsub_v1.types.StreamingPullRequest: A request suitable for being the first request on the stream (and not @@ -486,7 +520,7 @@ def _get_initial_request(self): request = types.StreamingPullRequest( modify_deadline_ack_ids=list(lease_ids), modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), - stream_ack_deadline_seconds=self.ack_histogram.percentile(99), + stream_ack_deadline_seconds=stream_ack_deadline_seconds, subscription=self._subscription, ) @@ -511,14 +545,6 @@ def _on_response(self, response): self._messages_on_hold.qsize(), ) - # Immediately modack the messages we received, as this tells the server - # that we've received them. 
- items = [ - requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) - for message in response.received_messages - ] - self._dispatcher.modify_ack_deadline(items) - invoke_callbacks_for = [] for received_message in response.received_messages: @@ -535,6 +561,15 @@ def _on_response(self, response): else: self._messages_on_hold.put(message) + # Immediately (i.e. without waiting for the auto lease management) + # modack the messages we received and not put on hold, as this tells + # the server that we've received them. + items = [ + requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) + for message in invoke_callbacks_for + ] + self._dispatcher.modify_ack_deadline(items) + _LOGGER.debug( "Scheduling callbacks for %s new messages, new total on hold %s.", len(invoke_callbacks_for), @@ -565,6 +600,26 @@ def _should_recover(self, exception): _LOGGER.info("Observed non-recoverable stream error %s", exception) return False + def _should_terminate(self, exception): + """Determine if an error on the RPC stream should be terminated. + + If the exception is one of the terminating exceptions, this will signal + to the consumer thread that it should terminate. + + This will cause the stream to exit when it returns :data:`True`. + + Returns: + bool: Indicates if the caller should terminate or attempt recovery. + Will be :data:`True` if the ``exception`` is "acceptable", i.e. + in a list of terminating exceptions. + """ + exception = _maybe_wrap_exception(exception) + if isinstance(exception, _TERMINATING_STREAM_ERRORS): + _LOGGER.info("Observed terminating stream error %s", exception) + return True + _LOGGER.info("Observed non-terminating stream error %s", exception) + return False + def _on_rpc_done(self, future): """Triggered whenever the underlying RPC terminates without recovery. 
diff --git a/pubsub/setup.py b/pubsub/setup.py index 9a13e8ae0f2c..45e2cc04c07d 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.0.0" +version = "1.0.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/pubsub/tests/system.py b/pubsub/tests/system.py index 7ffb4a580194..fd7473e1e53b 100644 --- a/pubsub/tests/system.py +++ b/pubsub/tests/system.py @@ -17,6 +17,7 @@ import datetime import itertools import operator as op +import os import threading import time @@ -381,6 +382,59 @@ class CallbackError(Exception): with pytest.raises(CallbackError): future.result(timeout=30) + @pytest.mark.xfail( + reason="The default stream ACK deadline is static and received messages " + "exceeding FlowControl.max_messages are currently not lease managed." + ) + def test_streaming_pull_ack_deadline( + self, publisher, subscriber, project, topic_path, subscription_path, cleanup + ): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # Create a topic and a subscription, then subscribe to the topic. This + # must happen before the messages are published. + publisher.create_topic(topic_path) + + # Subscribe to the topic. This must happen before the messages + # are published. 
+ subscriber.create_subscription( + subscription_path, topic_path, ack_deadline_seconds=240 + ) + + # publish some messages and wait for completion + self._publish_messages(publisher, topic_path, batch_sizes=[2]) + + # subscribe to the topic + callback = StreamingPullCallback( + processing_time=70, # more than the default stream ACK deadline (60s) + resolve_at_msg_count=3, # one more than the published messages count + ) + flow_control = types.FlowControl(max_messages=1) + subscription_future = subscriber.subscribe( + subscription_path, callback, flow_control=flow_control + ) + + # We expect to process the first two messages in 2 * 70 seconds, and + # any duplicate message that is re-sent by the backend in additional + # 70 seconds, totalling 210 seconds (+ overhead) --> if there have been + # no duplicates in 240 seconds, we can reasonably assume that there + # won't be any. + try: + callback.done_future.result(timeout=240) + except exceptions.TimeoutError: + # future timed out, because we received no excessive messages + assert sorted(callback.seen_message_ids) == [1, 2] + else: + pytest.fail( + "Expected to receive 2 messages, but got at least {}.".format( + len(callback.seen_message_ids) + ) + ) + finally: + subscription_future.cancel() + def test_streaming_pull_max_messages( self, publisher, topic_path, subscriber, subscription_path, cleanup ): @@ -435,6 +489,45 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown + @pytest.mark.skipif( + "KOKORO_GFILE_DIR" not in os.environ, + reason="Requires Kokoro environment with a limited subscriber service account.", + ) + def test_streaming_pull_subscriber_permissions_sufficient( + self, publisher, topic_path, subscriber, subscription_path, cleanup + ): + + # Make sure the topic and subscription get deleted. 
+ cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # create a topic and subscribe to it + publisher.create_topic(topic_path) + subscriber.create_subscription(subscription_path, topic_path) + + # A service account granting only the pubsub.subscriber role must be used. + filename = os.path.join( + os.environ["KOKORO_GFILE_DIR"], "pubsub-subscriber-service-account.json" + ) + streaming_pull_subscriber = type(subscriber).from_service_account_file(filename) + + # Subscribe to the topic, publish a message, and verify that subscriber + # successfully pulls and processes it. + callback = StreamingPullCallback(processing_time=0.01, resolve_at_msg_count=1) + future = streaming_pull_subscriber.subscribe(subscription_path, callback) + self._publish_messages(publisher, topic_path, batch_sizes=[1]) + + try: + callback.done_future.result(timeout=10) + except exceptions.TimeoutError: + pytest.fail( + "Timeout: receiving/processing streamed messages took too long." 
+ ) + else: + assert 1 in callback.seen_message_ids + finally: + future.cancel() + def _publish_messages(self, publisher, topic_path, batch_sizes): """Publish ``count`` messages in batches and wait until completion.""" publish_futures = [] diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 877ccf97fd9a..a69ea5ca5268 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -404,6 +404,8 @@ def test_heartbeat_inactive(): "google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater", autospec=True ) def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): + stream_ack_deadline = streaming_pull_manager._DEFAULT_STREAM_ACK_DEADLINE + manager = make_manager() manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) @@ -426,10 +428,16 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi resumable_bidi_rpc.assert_called_once_with( start_rpc=manager._client.api.streaming_pull, - initial_request=manager._get_initial_request, + initial_request=mock.ANY, should_recover=manager._should_recover, + should_terminate=manager._should_terminate, throttle_reopen=True, ) + initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] + assert initial_request_arg.func == manager._get_initial_request + assert initial_request_arg.args[0] == stream_ack_deadline + assert not manager._client.api.get_subscription.called + resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( manager._on_rpc_done ) @@ -574,11 +582,11 @@ def test__get_initial_request(): manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) manager._leaser.ack_ids = ["1", "2"] - initial_request = manager._get_initial_request() + initial_request = manager._get_initial_request(123) 
assert isinstance(initial_request, types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" - assert initial_request.stream_ack_deadline_seconds == 10 + assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == ["1", "2"] assert initial_request.modify_deadline_seconds == [10, 10] @@ -587,11 +595,11 @@ def test__get_initial_request_wo_leaser(): manager = make_manager() manager._leaser = None - initial_request = manager._get_initial_request() + initial_request = manager._get_initial_request(123) assert isinstance(initial_request, types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" - assert initial_request.stream_ack_deadline_seconds == 10 + assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == [] assert initial_request.modify_deadline_seconds == [] @@ -660,12 +668,10 @@ def test__on_response_with_leaser_overload(): # are called in the expected way. manager._on_response(response) + # only the messages that are added to the lease management and dispatched to + # callbacks should have their ACK deadline extended dispatcher.modify_ack_deadline.assert_called_once_with( - [ - requests.ModAckRequest("fack", 10), - requests.ModAckRequest("back", 10), - requests.ModAckRequest("zack", 10), - ] + [requests.ModAckRequest("fack", 10)] ) # one message should be scheduled, the leaser capacity allows for it @@ -719,6 +725,23 @@ def test__should_recover_false(): assert manager._should_recover(exc) is False +def test__should_terminate_true(): + manager = make_manager() + + details = "Cancelled. Go away, before I taunt you a second time." 
+ exc = exceptions.Cancelled(details) + + assert manager._should_terminate(exc) is True + + +def test__should_terminate_false(): + manager = make_manager() + + exc = TypeError("wahhhhhh") + + assert manager._should_terminate(exc) is False + + @mock.patch("threading.Thread", autospec=True) def test__on_rpc_done(thread): manager = make_manager() diff --git a/recommender/.coveragerc b/recommender/.coveragerc new file mode 100644 index 000000000000..b178b094aa1d --- /dev/null +++ b/recommender/.coveragerc @@ -0,0 +1,19 @@ +# Generated by synthtool. DO NOT EDIT! +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore abstract methods + raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */core/*.py + */site-packages/*.py \ No newline at end of file diff --git a/recommender/.flake8 b/recommender/.flake8 new file mode 100644 index 000000000000..0268ecc9c55c --- /dev/null +++ b/recommender/.flake8 @@ -0,0 +1,14 @@ +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py + + # Standard linting exemptions. 
+ __pycache__, + .git, + *.pyc, + conf.py diff --git a/recommender/.repo-metadata.json b/recommender/.repo-metadata.json new file mode 100644 index 000000000000..b3598c302da7 --- /dev/null +++ b/recommender/.repo-metadata.json @@ -0,0 +1,13 @@ +{ + "name": "recommender", + "name_pretty": "Cloud Recommender API", + "product_documentation": "https://cloud.google.com/recommender", + "client_documentation": "https://googleapis.dev/python/recommender/latest", + "issue_tracker": "", + "release_level": "alpha", + "language": "python", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-recommender", + "api_id": "recommender.googleapis.com", + "requires_billing": true +} \ No newline at end of file diff --git a/recommender/CHANGELOG.md b/recommender/CHANGELOG.md new file mode 100644 index 000000000000..b59d7ccaa076 --- /dev/null +++ b/recommender/CHANGELOG.md @@ -0,0 +1,13 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-recommender/#history + +## 0.1.0 + +09-27-2019 12:20 PDT + +### New Features +- initial release of v1beta1 ([#9257](https://github.com/googleapis/google-cloud-python/pull/9257)) + diff --git a/recommender/LICENSE b/recommender/LICENSE new file mode 100644 index 000000000000..a8ee855de2aa --- /dev/null +++ b/recommender/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/recommender/MANIFEST.in b/recommender/MANIFEST.in new file mode 100644 index 000000000000..9cbf175afe6b --- /dev/null +++ b/recommender/MANIFEST.in @@ -0,0 +1,5 @@ +include README.rst LICENSE +recursive-include google *.json *.proto +recursive-include tests * +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/recommender/README.rst b/recommender/README.rst new file mode 100644 index 000000000000..a9eff8727c19 --- /dev/null +++ b/recommender/README.rst @@ -0,0 +1,80 @@ +Python Client for Recommender API (`Alpha`_) +============================================ + +`Recommender API`_: + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Alpha: https://github.com/googleapis/google-cloud-python/blob/master/README.rst +.. _Recommender API: https://cloud.google.com/recommender +.. _Client Library Documentation: https://googleapis.dev/python/recommender/latest +.. _Product Documentation: https://cloud.google.com/recommender/docs + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Recommender API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. 
_Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Recommender API.: https://cloud.google.com/recommender +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.5 + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-recommender + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-recommender + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Recommender API + API to see other available methods on the client. +- Read the `Recommender API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Recommender API Product documentation: https://cloud.google.com/recommender +.. 
_repository’s main README: https://github.com/googleapis/google-cloud-python/blob/master/README.rst diff --git a/recommender/docs/README.rst b/recommender/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/recommender/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/docs/conf.py b/recommender/docs/conf.py similarity index 50% rename from docs/conf.py rename to recommender/docs/conf.py index 1aae38befbf6..fea4c9ab3b66 100644 --- a/docs/conf.py +++ b/recommender/docs/conf.py @@ -1,21 +1,9 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at +# google-cloud-recommender documentation build configuration file # -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# google-cloud documentation build configuration file, created by -# sphinx-quickstart on Tue Jan 21 22:24:47 2014. -# -# This file is execfile()d with the current directory set to its containing dir. +# This file is execfile()d with the current directory set to its +# containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. @@ -23,331 +11,353 @@ # All configuration values have a default; values that are commented out # serve to show the default. 
-import email +import sys import os -import pkg_resources -import shutil - -from sphinx.util import logging - -logger = logging.getLogger(__name__) +import shlex # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -# sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) -# -- General configuration ----------------------------------------------------- +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = '1.6.3' +needs_sphinx = "1.6.3" -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.doctest', - 'sphinx.ext.intersphinx', - 'sphinx.ext.napoleon', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", ] +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + # Add any paths that contain templates here, relative to this directory. 
-templates_path = ['_templates'] +templates_path = ["_templates"] # Allow markdown includes (so releases.md can include CHANGLEOG.md) # http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = { - '.md': 'recommonmark.parser.CommonMarkParser', -} +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} -# The suffix of source filenames. -source_suffix = ['.rst', '.md'] +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'google-cloud' -copyright = u'2014-2017, Google' +project = u"google-cloud-recommender" +copyright = u"2017, Google" +author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # +# The full version, including alpha/beta/rc tags. +release = __version__ # The short X.Y version. -distro = pkg_resources.get_distribution('google-cloud-core') -release = os.getenv('SPHINX_RELEASE', distro.version) +version = ".".join(release.split(".")[0:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. 
-#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True -# -- Options for HTML output --------------------------------------------------- +# -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'alabaster' +html_theme = "alabaster" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - 'description': 'Google Cloud Client Libraries for Python', - 'github_user': 'GoogleCloudPlatform', - 'github_repo': 'google-cloud-python', - 'github_banner': True, - 'font_family': "'Roboto', Georgia, sans", - 'head_font_family': "'Roboto', Georgia, serif", - 'code_font_family': "'Roboto Mono', 'Consolas', monospace", + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", } # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -html_favicon = '_static/images/favicon.ico' +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +# html_static_path = [] -html_add_permalinks = '#' +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. 
+# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -html_sidebars = { - '**': [ - 'about.html', - 'navigation.html', - 'relations.html', - 'searchbox.html', - ] -} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None -# Output file base name for HTML help builder. -htmlhelp_basename = 'google-cloud-doc' +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' -html_context = {} +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' -# -- Options for LaTeX output -------------------------------------------------- +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-recommender-doc" -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', +# -- Options for LaTeX output --------------------------------------------- -# Additional stuff for the LaTeX preamble. -#'preamble': '', +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', } -metadata = distro.get_metadata(distro.PKG_INFO) -author = email.message_from_string(metadata).get('Author') # Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'google-cloud.tex', u'google-cloud Documentation', - author, 'manual'), + ( + master_doc, + "google-cloud-recommender.tex", + u"google-cloud-recommender Documentation", + author, + "manual", + ) ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for warnings ------------------------------------------------------ +# latex_domain_indices = True -suppress_warnings = [ - # Temporarily suppress ths to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - 'ref.python' -] - - -# -- Options for manual page output -------------------------------------------- +# -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - ('index', 'google-cloud', u'google-cloud Documentation', - [author], 1) + ( + master_doc, + "google-cloud-recommender", + u"google-cloud-recommender Documentation", + [author], + 1, + ) ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False -# -- Options for Texinfo output ------------------------------------------------ +# -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'google-cloud', u'google-cloud Documentation', - author, 'google-cloud', 'Python API for Google Cloud.', - 'Miscellaneous'), + ( + master_doc, + "google-cloud-recommender", + u"google-cloud-recommender Documentation", + author, + "google-cloud-recommender", + "GAPIC library for the Recommender v1beta1 service", + "APIs", + ) ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' -# This pulls class descriptions from the class docstring, -# and parameter definitions from the __init__ docstring. -autoclass_content = 'both' +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False -# Automatically generate API reference stubs from autosummary. -# http://www.sphinx-doc.org/en/master/ext/autosummary.html#generating-stub-pages-automatically -autosummary_generate = True -# Configuration for intersphinx: +# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = { - 'fastavro': ('https://fastavro.readthedocs.io/en/stable/', None), - 'google-auth': ('https://google-auth.readthedocs.io/en/stable', None), - 'google-gax': ('https://gax-python.readthedocs.io/en/latest/', None), - 'grpc': ('https://grpc.io/grpc/python/', None), - 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None), - 'python': ('https://docs.python.org/3', None), - 'requests': ('https://requests.kennethreitz.org/en/stable/', None), + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), + "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } -# Static HTML pages, e.g. 
to support redirects -# See: https://tech.signavio.com/2017/managing-sphinx-redirects -# HTML pages to be copied from source to target -static_html_pages = [ - 'datastore/usage.html', - 'dns/usage.html', - 'bigquery/usage.html', - 'bigquery/generated/google.cloud.bigquery.magics.html', - 'runtimeconfig/usage.html', - 'spanner/usage.html', - 'trace/starting.html', -] -def copy_static_html_pages(app, exception): - if exception is None and app.builder.name == 'html': - for static_html_page in static_html_pages: - target_path = app.outdir + '/' + static_html_page - src_path = app.srcdir + '/' + static_html_page - if os.path.isfile(src_path): - logger.info( - 'Copying static html: %s -> %s', src_path, target_path) - shutil.copyfile(src_path, target_path) - -def setup(app): - app.connect('build-finished', copy_static_html_pages) +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/recommender/docs/gapic/v1beta1/api.rst b/recommender/docs/gapic/v1beta1/api.rst new file mode 100644 index 000000000000..d9458d258e2a --- /dev/null +++ b/recommender/docs/gapic/v1beta1/api.rst @@ -0,0 +1,6 @@ +Client for Recommender API +========================== + +.. automodule:: google.cloud.recommender_v1beta1 + :members: + :inherited-members: \ No newline at end of file diff --git a/recommender/docs/gapic/v1beta1/types.rst b/recommender/docs/gapic/v1beta1/types.rst new file mode 100644 index 000000000000..3771da42e95f --- /dev/null +++ b/recommender/docs/gapic/v1beta1/types.rst @@ -0,0 +1,5 @@ +Types for Recommender API Client +================================ + +.. 
automodule:: google.cloud.recommender_v1beta1.types + :members: \ No newline at end of file diff --git a/recommender/docs/index.rst b/recommender/docs/index.rst new file mode 100644 index 000000000000..37c319bfaffe --- /dev/null +++ b/recommender/docs/index.rst @@ -0,0 +1,9 @@ +.. include:: README.rst + +Api Reference +------------- +.. toctree:: + :maxdepth: 2 + + gapic/v1beta1/api + gapic/v1beta1/types \ No newline at end of file diff --git a/recommender/google/__init__.py b/recommender/google/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/recommender/google/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/recommender/google/cloud/__init__.py b/recommender/google/cloud/__init__.py new file mode 100644 index 000000000000..8fcc60e2b9c6 --- /dev/null +++ b/recommender/google/cloud/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/recommender/google/cloud/recommender.py b/recommender/google/cloud/recommender.py new file mode 100644 index 000000000000..3caae5792568 --- /dev/null +++ b/recommender/google/cloud/recommender.py @@ -0,0 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import + +from google.cloud.recommender_v1beta1 import RecommenderClient +from google.cloud.recommender_v1beta1 import enums +from google.cloud.recommender_v1beta1 import types + + +__all__ = ("enums", "types", "RecommenderClient") diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/__init__.py b/recommender/google/cloud/recommender_v1beta1/__init__.py similarity index 58% rename from videointelligence/google/cloud/videointelligence_v1beta1/__init__.py rename to recommender/google/cloud/recommender_v1beta1/__init__.py index d71748798c3c..2ea08cfa4d18 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/__init__.py +++ b/recommender/google/cloud/recommender_v1beta1/__init__.py @@ -17,18 +17,14 @@ from __future__ import absolute_import -from google.cloud.videointelligence_v1beta1 import types -from google.cloud.videointelligence_v1beta1.gapic import enums -from google.cloud.videointelligence_v1beta1.gapic import ( - video_intelligence_service_client, -) +from google.cloud.recommender_v1beta1 import types +from google.cloud.recommender_v1beta1.gapic import enums +from google.cloud.recommender_v1beta1.gapic import recommender_client -class VideoIntelligenceServiceClient( - video_intelligence_service_client.VideoIntelligenceServiceClient -): - __doc__ = video_intelligence_service_client.VideoIntelligenceServiceClient.__doc__ +class RecommenderClient(recommender_client.RecommenderClient): + __doc__ = recommender_client.RecommenderClient.__doc__ enums = enums -__all__ = ("enums", "types", "VideoIntelligenceServiceClient") +__all__ = ("enums", "types", "RecommenderClient") diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/proto/__init__.py b/recommender/google/cloud/recommender_v1beta1/gapic/__init__.py similarity index 100% rename from videointelligence/google/cloud/videointelligence_v1beta1/proto/__init__.py rename to recommender/google/cloud/recommender_v1beta1/gapic/__init__.py 
diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/enums.py b/recommender/google/cloud/recommender_v1beta1/gapic/enums.py new file mode 100644 index 000000000000..5d7fbb6b7a31 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/gapic/enums.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class NullValue(enum.IntEnum): + """ + ``NullValue`` is a singleton enumeration to represent the null value for + the ``Value`` type union. + + The JSON representation for ``NullValue`` is JSON ``null``. + + Attributes: + NULL_VALUE (int): Null value. + """ + + NULL_VALUE = 0 + + +class Impact(object): + class Category(enum.IntEnum): + """ + The category of the impact. + + Attributes: + CATEGORY_UNSPECIFIED (int): Default unspecified category. Don't use directly. + COST (int): Indicates a potential increase or decrease in cost. + SECURITY (int): Indicates a potential increase or decrease in security. + PERFORMANCE (int): Indicates a potential increase or decrease in performance. + """ + + CATEGORY_UNSPECIFIED = 0 + COST = 1 + SECURITY = 2 + PERFORMANCE = 3 + + +class RecommendationStateInfo(object): + class State(enum.IntEnum): + """ + Represents Recommendation State + + Attributes: + STATE_UNSPECIFIED (int): Default state. Don't use directly. + ACTIVE (int): Recommendation is active and can be applied. 
Recommendations content can + be updated by Google. + + ACTIVE recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED. + CLAIMED (int): Recommendation is in claimed state. Recommendations content is + immutable and cannot be updated by Google. + + CLAIMED recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED. + SUCCEEDED (int): Recommendation is in succeeded state. Recommendations content is + immutable and cannot be updated by Google. + + SUCCEEDED recommendations can be marked as SUCCEEDED, or FAILED. + FAILED (int): Recommendation is in failed state. Recommendations content is immutable + and cannot be updated by Google. + + FAILED recommendations can be marked as SUCCEEDED, or FAILED. + DISMISSED (int): Recommendation is in dismissed state. Recommendation content can be + updated by Google. + + DISMISSED recommendations can be marked as ACTIVE. + """ + + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CLAIMED = 6 + SUCCEEDED = 3 + FAILED = 4 + DISMISSED = 5 diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client.py b/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client.py new file mode 100644 index 000000000000..deb485bb83f3 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client.py @@ -0,0 +1,670 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Accesses the google.cloud.recommender.v1beta1 Recommender API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.recommender_v1beta1.gapic import enums +from google.cloud.recommender_v1beta1.gapic import recommender_client_config +from google.cloud.recommender_v1beta1.gapic.transports import recommender_grpc_transport +from google.cloud.recommender_v1beta1.proto import recommendation_pb2 +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2 +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2_grpc + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-recommender" +).version + + +class RecommenderClient(object): + """ + Provides recommendations for cloud customers for various categories like + performance optimization, cost savings, reliability, feature discovery, etc. + These recommendations are generated automatically based on analysis of user + resources, configuration and monitoring metrics. + """ + + SERVICE_ADDRESS = "recommender.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.recommender.v1beta1.Recommender" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RecommenderClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def recommendation_path(cls, project, location, recommender, recommendation): + """Return a fully-qualified recommendation string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/recommenders/{recommender}/recommendations/{recommendation}", + project=project, + location=location, + recommender=recommender, + recommendation=recommendation, + ) + + @classmethod + def recommender_path(cls, project, location, recommender): + """Return a fully-qualified recommender string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/recommenders/{recommender}", + project=project, + location=location, + recommender=recommender, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.RecommenderGrpcTransport, + Callable[[~.Credentials, type], ~.RecommenderGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. 
+ credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = recommender_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. 
+ if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=recommender_grpc_transport.RecommenderGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) + self.transport = transport + else: + self.transport = recommender_grpc_transport.RecommenderGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def list_recommendations( + self, + parent, + page_size=None, + filter_=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists recommendations for a Cloud project. Requires the + recommender.\*.list IAM permission for the specified recommender. + + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> parent = client.recommender_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]') + >>> + >>> # Iterate over all results + >>> for element in client.list_recommendations(parent): + ... 
# process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_recommendations(parent).pages: + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. The container resource on which to execute the request. + Acceptable formats: + + 1. + + "projects/[PROJECT\_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER\_ID]", + + LOCATION here refers to GCP Locations: + https://cloud.google.com/about/locations/ + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + filter_ (str): Filter expression to restrict the recommendations returned. Supported + filter fields: state\_info.state Eg: \`state\_info.state:"DISMISSED" or + state\_info.state:"FAILED" + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. 
+ ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_recommendations" not in self._inner_api_calls: + self._inner_api_calls[ + "list_recommendations" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_recommendations, + default_retry=self._method_configs["ListRecommendations"].retry, + default_timeout=self._method_configs["ListRecommendations"].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.ListRecommendationsRequest( + parent=parent, page_size=page_size, filter=filter_ + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_recommendations"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="recommendations", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def get_recommendation( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets the requested recommendation. Requires the recommender.\*.get IAM + permission for the specified recommender. + + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> name = client.recommendation_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]', '[RECOMMENDATION]') + >>> + >>> response = client.get_recommendation(name) + + Args: + name (str): Name of the recommendation. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. 
If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_recommendation" not in self._inner_api_calls: + self._inner_api_calls[ + "get_recommendation" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_recommendation, + default_retry=self._method_configs["GetRecommendation"].retry, + default_timeout=self._method_configs["GetRecommendation"].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.GetRecommendationRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_recommendation"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def mark_recommendation_claimed( + self, + name, + etag, + state_metadata=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Mark the Recommendation State as Claimed. 
Users can use this method to + indicate to the Recommender API that they are starting to apply the + recommendation themselves. This stops the recommendation content from + being updated. + + MarkRecommendationClaimed can be applied to recommendations in CLAIMED, + SUCCEEDED, FAILED, or ACTIVE state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. + + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> name = client.recommendation_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]', '[RECOMMENDATION]') + >>> + >>> # TODO: Initialize `etag`: + >>> etag = '' + >>> + >>> response = client.mark_recommendation_claimed(name, etag) + + Args: + name (str): Name of the recommendation. + etag (str): Fingerprint of the Recommendation. Provides optimistic locking. + state_metadata (dict[str -> str]): State properties to include with this state. Overwrites any existing + ``state_metadata``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "mark_recommendation_claimed" not in self._inner_api_calls: + self._inner_api_calls[ + "mark_recommendation_claimed" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.mark_recommendation_claimed, + default_retry=self._method_configs["MarkRecommendationClaimed"].retry, + default_timeout=self._method_configs[ + "MarkRecommendationClaimed" + ].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.MarkRecommendationClaimedRequest( + name=name, etag=etag, state_metadata=state_metadata + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["mark_recommendation_claimed"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def mark_recommendation_succeeded( + self, + name, + etag, + state_metadata=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Mark the Recommendation State as Succeeded. Users can use this method to + indicate to the Recommender API that they have applied the + recommendation themselves, and the operation was successful. This stops + the recommendation content from being updated. + + MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. 
+ + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> name = client.recommendation_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]', '[RECOMMENDATION]') + >>> + >>> # TODO: Initialize `etag`: + >>> etag = '' + >>> + >>> response = client.mark_recommendation_succeeded(name, etag) + + Args: + name (str): Name of the recommendation. + etag (str): Fingerprint of the Recommendation. Provides optimistic locking. + state_metadata (dict[str -> str]): State properties to include with this state. Overwrites any existing + ``state_metadata``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "mark_recommendation_succeeded" not in self._inner_api_calls: + self._inner_api_calls[ + "mark_recommendation_succeeded" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.mark_recommendation_succeeded, + default_retry=self._method_configs["MarkRecommendationSucceeded"].retry, + default_timeout=self._method_configs[ + "MarkRecommendationSucceeded" + ].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.MarkRecommendationSucceededRequest( + name=name, etag=etag, state_metadata=state_metadata + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["mark_recommendation_succeeded"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def mark_recommendation_failed( + self, + name, + etag, + state_metadata=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Mark the Recommendation State as Failed. Users can use this method to + indicate to the Recommender API that they have applied the + recommendation themselves, and the operation failed. This stops the + recommendation content from being updated. + + MarkRecommendationFailed can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. 
+ + Example: + >>> from google.cloud import recommender_v1beta1 + >>> + >>> client = recommender_v1beta1.RecommenderClient() + >>> + >>> name = client.recommendation_path('[PROJECT]', '[LOCATION]', '[RECOMMENDER]', '[RECOMMENDATION]') + >>> + >>> # TODO: Initialize `etag`: + >>> etag = '' + >>> + >>> response = client.mark_recommendation_failed(name, etag) + + Args: + name (str): Name of the recommendation. + etag (str): Fingerprint of the Recommendation. Provides optimistic locking. + state_metadata (dict[str -> str]): State properties to include with this state. Overwrites any existing + ``state_metadata``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.recommender_v1beta1.types.Recommendation` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "mark_recommendation_failed" not in self._inner_api_calls: + self._inner_api_calls[ + "mark_recommendation_failed" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.mark_recommendation_failed, + default_retry=self._method_configs["MarkRecommendationFailed"].retry, + default_timeout=self._method_configs[ + "MarkRecommendationFailed" + ].timeout, + client_info=self._client_info, + ) + + request = recommender_service_pb2.MarkRecommendationFailedRequest( + name=name, etag=etag, state_metadata=state_metadata + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["mark_recommendation_failed"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client_config.py b/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client_config.py new file mode 100644 index 000000000000..a1a36482eecd --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/gapic/recommender_client_config.py @@ -0,0 +1,48 @@ +config = { + "interfaces": { + "google.cloud.recommender.v1beta1.Recommender": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "ListRecommendations": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "GetRecommendation": { + "timeout_millis": 60000, + "retry_codes_name": 
"idempotent", + "retry_params_name": "default", + }, + "MarkRecommendationClaimed": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "MarkRecommendationSucceeded": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "MarkRecommendationFailed": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/transports/__init__.py b/recommender/google/cloud/recommender_v1beta1/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/recommender/google/cloud/recommender_v1beta1/gapic/transports/recommender_grpc_transport.py b/recommender/google/cloud/recommender_v1beta1/gapic/transports/recommender_grpc_transport.py new file mode 100644 index 000000000000..cc3af3d44211 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/gapic/transports/recommender_grpc_transport.py @@ -0,0 +1,202 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers + +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2_grpc + + +class RecommenderGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.recommender.v1beta1 Recommender API. 
+ + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) + + def __init__( + self, channel=None, credentials=None, address="recommender.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. + if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + "recommender_stub": recommender_service_pb2_grpc.RecommenderStub(channel) + } + + @classmethod + def create_channel( + cls, address="recommender.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. 
+ credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def list_recommendations(self): + """Return the gRPC stub for :meth:`RecommenderClient.list_recommendations`. + + Lists recommendations for a Cloud project. Requires the + recommender.\*.list IAM permission for the specified recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["recommender_stub"].ListRecommendations + + @property + def get_recommendation(self): + """Return the gRPC stub for :meth:`RecommenderClient.get_recommendation`. + + Gets the requested recommendation. Requires the recommender.\*.get IAM + permission for the specified recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["recommender_stub"].GetRecommendation + + @property + def mark_recommendation_claimed(self): + """Return the gRPC stub for :meth:`RecommenderClient.mark_recommendation_claimed`. + + Mark the Recommendation State as Claimed. Users can use this method to + indicate to the Recommender API that they are starting to apply the + recommendation themselves. This stops the recommendation content from + being updated. 
+ + MarkRecommendationClaimed can be applied to recommendations in CLAIMED, + SUCCEEDED, FAILED, or ACTIVE state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["recommender_stub"].MarkRecommendationClaimed + + @property + def mark_recommendation_succeeded(self): + """Return the gRPC stub for :meth:`RecommenderClient.mark_recommendation_succeeded`. + + Mark the Recommendation State as Succeeded. Users can use this method to + indicate to the Recommender API that they have applied the + recommendation themselves, and the operation was successful. This stops + the recommendation content from being updated. + + MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["recommender_stub"].MarkRecommendationSucceeded + + @property + def mark_recommendation_failed(self): + """Return the gRPC stub for :meth:`RecommenderClient.mark_recommendation_failed`. + + Mark the Recommendation State as Failed. Users can use this method to + indicate to the Recommender API that they have applied the + recommendation themselves, and the operation failed. This stops the + recommendation content from being updated. + + MarkRecommendationFailed can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.\*.update IAM permission for the specified + recommender. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["recommender_stub"].MarkRecommendationFailed diff --git a/recommender/google/cloud/recommender_v1beta1/proto/__init__.py b/recommender/google/cloud/recommender_v1beta1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommendation.proto b/recommender/google/cloud/recommender_v1beta1/proto/recommendation.proto new file mode 100644 index 000000000000..8980de0c4274 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommendation.proto @@ -0,0 +1,234 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.recommender.v1beta1; + +import "google/protobuf/duration.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; +import "google/type/money.proto"; + +option csharp_namespace = "Google.Cloud.Recommender.V1Beta1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender"; +option java_multiple_files = true; +option java_package = "com.google.cloud.recommender.v1beta1"; +option objc_class_prefix = "CREC"; + +// A recommendation along with a suggested action. E.g., a rightsizing +// recommendation for an underutilized VM, IAM role recommendations, etc +message Recommendation { + // Name of recommendation. 
+ // + // A project recommendation is represented as + // projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]/recommendations/[RECOMMENDATION_ID] + string name = 1; + + // Free-form human readable summary in English. The maximum length is 500 + // characters. + string description = 2; + + // Contains an identifier for a subtype of recommendations produced for the + // same recommender. Subtype is a function of content and impact, meaning a + // new subtype will be added when either content or primary impact category + // changes. + // + // Examples: + // For recommender = "google.iam.policy.RoleRecommender", + // recommender_subtype can be one of "REMOVE_ROLE"/"REPLACE_ROLE" + string recommender_subtype = 12; + + // Last time this recommendation was refreshed by the system that created it + // in the first place. + google.protobuf.Timestamp last_refresh_time = 4; + + // The primary impact that this recommendation can have while trying to + // optimize for one category. + Impact primary_impact = 5; + + // Optional set of additional impact that this recommendation may have when + // trying to optimize for the primary category. These may be positive + // or negative. + repeated Impact additional_impact = 6; + + // Content of the recommendation describing recommended changes to resources. + RecommendationContent content = 7; + + // Information for state. Contains state and metadata. + RecommendationStateInfo state_info = 10; + + // Fingerprint of the Recommendation. Provides optimistic locking when + // updating states. + string etag = 11; +} + +// Contains what resources are changing and how they are changing. +message RecommendationContent { + // Operations to one or more Google Cloud resources grouped in such a way + // that, all operations within one group are expected to be performed + // atomically and in an order. + repeated OperationGroup operation_groups = 2; +} + +// Group of operations that need to be performed atomically. 
+message OperationGroup { + // List of operations across one or more resources that belong to this group. + // Loosely based on RFC6902 and should be performed in the order they appear. + repeated Operation operations = 1; +} + +// Contains an operation for a resource inspired by the JSON-PATCH format with +// support for: +// * Custom filters for describing partial array patch. +// * Extended path values for describing nested arrays. +// * Custom fields for describing the resource for which the operation is being +// described. +// * Allows extension to custom operations not natively supported by RFC6902. +// See https://tools.ietf.org/html/rfc6902 for details on the original RFC. +message Operation { + // Type of this operation. Contains one of 'add', 'remove', 'replace', 'move', + // 'copy', 'test' and custom operations. This field is case-insensitive and + // always populated. + string action = 1; + + // Type of GCP resource being modified/tested. This field is always populated. + // Example: cloudresourcemanager.googleapis.com/Project, + // compute.googleapis.com/Instance + string resource_type = 2; + + // Contains the fully qualified resource name. This field is always populated. + // ex: //cloudresourcemanager.googleapis.com/projects/foo. + string resource = 3; + + // Path to the target field being operated on. If the operation is at the + // resource level, then path should be "/". This field is always populated. + string path = 4; + + // Can be set with action 'copy' to copy resource configuration across + // different resources of the same type. Example: A resource clone can be + // done via action = 'copy', path = "/", from = "/", + // source_resource = <source> and resource_name = <target>. + // This field is empty for all other values of `action`. + string source_resource = 5; + + // Can be set with action 'copy' or 'move' to indicate the source field within + // resource or source_resource, ignored if provided for other operation types. 
+ string source_path = 6; + + // Value for the `path` field. Set if action is 'add'/'replace'/'test'. + google.protobuf.Value value = 7; + + // Set of filters to apply if `path` refers to array elements or nested array + // elements in order to narrow down to a single unique element that is being + // tested/modified. + // Note that this is intended to be an exact match per filter. + // Example: { + // "/versions/*/name" : "it-123" + // "/versions/*/targetSize/percent": 20 + // } + // Example: { + // "/bindings/*/role": "roles/admin" + // "/bindings/*/condition" : null + // } + // Example: { + // "/bindings/*/role": "roles/admin" + // "/bindings/*/members/*" : ["x@google.com", "y@google.com"] + // } + map<string, google.protobuf.Value> path_filters = 8; +} + +// Contains metadata about how much money a recommendation can save or incur. +message CostProjection { + // An approximate projection on amount saved or amount incurred. Negative cost + // units indicate cost savings and positive cost units indicate increase. + // See google.type.Money documentation for positive/negative units. + google.type.Money cost = 1; + + // Duration for which this cost applies. + google.protobuf.Duration duration = 2; +} + +// Contains the impact a recommendation can have for a given category. +message Impact { + // The category of the impact. + enum Category { + // Default unspecified category. Don't use directly. + CATEGORY_UNSPECIFIED = 0; + + // Indicates a potential increase or decrease in cost. + COST = 1; + + // Indicates a potential increase or decrease in security. + SECURITY = 2; + + // Indicates a potential increase or decrease in performance. + PERFORMANCE = 3; + } + + // Category that is being targeted. + Category category = 1; + + // Contains projections (if any) for this category. + oneof projection { + // Use with CategoryType.COST + CostProjection cost_projection = 100; + } +} + +// Information for state. Contains state and metadata. 
+message RecommendationStateInfo { + // Represents Recommendation State + enum State { + // Default state. Don't use directly. + STATE_UNSPECIFIED = 0; + + // Recommendation is active and can be applied. Recommendations content can + // be updated by Google. + // + // ACTIVE recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED. + ACTIVE = 1; + + // Recommendation is in claimed state. Recommendations content is + // immutable and cannot be updated by Google. + // + // CLAIMED recommendations can be marked as CLAIMED, SUCCEEDED, or FAILED. + CLAIMED = 6; + + // Recommendation is in succeeded state. Recommendations content is + // immutable and cannot be updated by Google. + // + // SUCCEEDED recommendations can be marked as SUCCEEDED, or FAILED. + SUCCEEDED = 3; + + // Recommendation is in failed state. Recommendations content is immutable + // and cannot be updated by Google. + // + // FAILED recommendations can be marked as SUCCEEDED, or FAILED. + FAILED = 4; + + // Recommendation is in dismissed state. Recommendation content can be + // updated by Google. + // + // DISMISSED recommendations can be marked as ACTIVE. + DISMISSED = 5; + } + + // The state of the recommendation, Eg ACTIVE, SUCCEEDED, FAILED. + State state = 1; + + // A map of metadata for the state, provided by user or automations systems. + map state_metadata = 2; +} diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2.py b/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2.py new file mode 100644 index 000000000000..e7d5913a69a3 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2.py @@ -0,0 +1,1119 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/recommender_v1beta1/proto/recommendation.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.type import money_pb2 as google_dot_type_dot_money__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/recommender_v1beta1/proto/recommendation.proto", + package="google.cloud.recommender.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n$com.google.cloud.recommender.v1beta1P\001ZKgoogle.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender\242\002\004CREC\252\002 Google.Cloud.Recommender.V1Beta1" + ), + serialized_pb=_b( + '\n;google/cloud/recommender_v1beta1/proto/recommendation.proto\x12 google.cloud.recommender.v1beta1\x1a\x1egoogle/protobuf/duration.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/type/money.proto"\xb5\x03\n\x0eRecommendation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x1b\n\x13recommender_subtype\x18\x0c \x01(\t\x12\x35\n\x11last_refresh_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12@\n\x0eprimary_impact\x18\x05 \x01(\x0b\x32(.google.cloud.recommender.v1beta1.Impact\x12\x43\n\x11\x61\x64\x64itional_impact\x18\x06 \x03(\x0b\x32(.google.cloud.recommender.v1beta1.Impact\x12H\n\x07\x63ontent\x18\x07 
\x01(\x0b\x32\x37.google.cloud.recommender.v1beta1.RecommendationContent\x12M\n\nstate_info\x18\n \x01(\x0b\x32\x39.google.cloud.recommender.v1beta1.RecommendationStateInfo\x12\x0c\n\x04\x65tag\x18\x0b \x01(\t"c\n\x15RecommendationContent\x12J\n\x10operation_groups\x18\x02 \x03(\x0b\x32\x30.google.cloud.recommender.v1beta1.OperationGroup"Q\n\x0eOperationGroup\x12?\n\noperations\x18\x01 \x03(\x0b\x32+.google.cloud.recommender.v1beta1.Operation"\xc7\x02\n\tOperation\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x10\n\x08resource\x18\x03 \x01(\t\x12\x0c\n\x04path\x18\x04 \x01(\t\x12\x17\n\x0fsource_resource\x18\x05 \x01(\t\x12\x13\n\x0bsource_path\x18\x06 \x01(\t\x12%\n\x05value\x18\x07 \x01(\x0b\x32\x16.google.protobuf.Value\x12R\n\x0cpath_filters\x18\x08 \x03(\x0b\x32<.google.cloud.recommender.v1beta1.Operation.PathFiltersEntry\x1aJ\n\x10PathFiltersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01"_\n\x0e\x43ostProjection\x12 \n\x04\x63ost\x18\x01 \x01(\x0b\x32\x12.google.type.Money\x12+\n\x08\x64uration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xf7\x01\n\x06Impact\x12\x43\n\x08\x63\x61tegory\x18\x01 \x01(\x0e\x32\x31.google.cloud.recommender.v1beta1.Impact.Category\x12K\n\x0f\x63ost_projection\x18\x64 \x01(\x0b\x32\x30.google.cloud.recommender.v1beta1.CostProjectionH\x00"M\n\x08\x43\x61tegory\x12\x18\n\x14\x43\x41TEGORY_UNSPECIFIED\x10\x00\x12\x08\n\x04\x43OST\x10\x01\x12\x0c\n\x08SECURITY\x10\x02\x12\x0f\n\x0bPERFORMANCE\x10\x03\x42\x0c\n\nprojection"\xe8\x02\n\x17RecommendationStateInfo\x12N\n\x05state\x18\x01 \x01(\x0e\x32?.google.cloud.recommender.v1beta1.RecommendationStateInfo.State\x12\x64\n\x0estate_metadata\x18\x02 \x03(\x0b\x32L.google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry\x1a\x34\n\x12StateMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"a\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\n\n\x06\x41\x43TIVE\x10\x01\x12\x0b\n\x07\x43LAIMED\x10\x06\x12\r\n\tSUCCEEDED\x10\x03\x12\n\n\x06\x46\x41ILED\x10\x04\x12\r\n\tDISMISSED\x10\x05\x42\x9f\x01\n$com.google.cloud.recommender.v1beta1P\x01ZKgoogle.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender\xa2\x02\x04\x43REC\xaa\x02 Google.Cloud.Recommender.V1Beta1b\x06proto3' + ), + dependencies=[ + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_struct__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_type_dot_money__pb2.DESCRIPTOR, + ], +) + + +_IMPACT_CATEGORY = _descriptor.EnumDescriptor( + name="Category", + full_name="google.cloud.recommender.v1beta1.Impact.Category", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="CATEGORY_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="COST", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SECURITY", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="PERFORMANCE", index=3, number=3, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=1425, + serialized_end=1502, +) +_sym_db.RegisterEnumDescriptor(_IMPACT_CATEGORY) + +_RECOMMENDATIONSTATEINFO_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="ACTIVE", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CLAIMED", index=2, number=6, 
serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="DISMISSED", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=1782, + serialized_end=1879, +) +_sym_db.RegisterEnumDescriptor(_RECOMMENDATIONSTATEINFO_STATE) + + +_RECOMMENDATION = _descriptor.Descriptor( + name="Recommendation", + full_name="google.cloud.recommender.v1beta1.Recommendation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.recommender.v1beta1.Recommendation.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="description", + full_name="google.cloud.recommender.v1beta1.Recommendation.description", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="recommender_subtype", + full_name="google.cloud.recommender.v1beta1.Recommendation.recommender_subtype", + index=2, + number=12, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="last_refresh_time", + full_name="google.cloud.recommender.v1beta1.Recommendation.last_refresh_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="primary_impact", + full_name="google.cloud.recommender.v1beta1.Recommendation.primary_impact", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="additional_impact", + full_name="google.cloud.recommender.v1beta1.Recommendation.additional_impact", + index=5, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="content", + full_name="google.cloud.recommender.v1beta1.Recommendation.content", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_info", + full_name="google.cloud.recommender.v1beta1.Recommendation.state_info", + index=7, + number=10, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + 
), + _descriptor.FieldDescriptor( + name="etag", + full_name="google.cloud.recommender.v1beta1.Recommendation.etag", + index=8, + number=11, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=218, + serialized_end=655, +) + + +_RECOMMENDATIONCONTENT = _descriptor.Descriptor( + name="RecommendationContent", + full_name="google.cloud.recommender.v1beta1.RecommendationContent", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="operation_groups", + full_name="google.cloud.recommender.v1beta1.RecommendationContent.operation_groups", + index=0, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=657, + serialized_end=756, +) + + +_OPERATIONGROUP = _descriptor.Descriptor( + name="OperationGroup", + full_name="google.cloud.recommender.v1beta1.OperationGroup", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="operations", + full_name="google.cloud.recommender.v1beta1.OperationGroup.operations", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=758, + serialized_end=839, +) + + +_OPERATION_PATHFILTERSENTRY = _descriptor.Descriptor( + name="PathFiltersEntry", + full_name="google.cloud.recommender.v1beta1.Operation.PathFiltersEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.Operation.PathFiltersEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.Operation.PathFiltersEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1095, + serialized_end=1169, +) + +_OPERATION = _descriptor.Descriptor( + name="Operation", + full_name="google.cloud.recommender.v1beta1.Operation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="action", + full_name="google.cloud.recommender.v1beta1.Operation.action", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource_type", + full_name="google.cloud.recommender.v1beta1.Operation.resource_type", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="resource", + full_name="google.cloud.recommender.v1beta1.Operation.resource", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="path", + full_name="google.cloud.recommender.v1beta1.Operation.path", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_resource", + full_name="google.cloud.recommender.v1beta1.Operation.source_resource", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_path", + full_name="google.cloud.recommender.v1beta1.Operation.source_path", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + 
default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.Operation.value", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="path_filters", + full_name="google.cloud.recommender.v1beta1.Operation.path_filters", + index=7, + number=8, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_OPERATION_PATHFILTERSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=842, + serialized_end=1169, +) + + +_COSTPROJECTION = _descriptor.Descriptor( + name="CostProjection", + full_name="google.cloud.recommender.v1beta1.CostProjection", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="cost", + full_name="google.cloud.recommender.v1beta1.CostProjection.cost", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="duration", + full_name="google.cloud.recommender.v1beta1.CostProjection.duration", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1171, + serialized_end=1266, +) + + +_IMPACT = _descriptor.Descriptor( + name="Impact", + full_name="google.cloud.recommender.v1beta1.Impact", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="category", + full_name="google.cloud.recommender.v1beta1.Impact.category", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="cost_projection", + full_name="google.cloud.recommender.v1beta1.Impact.cost_projection", + index=1, + number=100, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_IMPACT_CATEGORY], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="projection", + full_name="google.cloud.recommender.v1beta1.Impact.projection", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1269, + serialized_end=1516, +) + + +_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY = _descriptor.Descriptor( + name="StateMetadataEntry", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry", + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1728, + serialized_end=1780, +) + +_RECOMMENDATIONSTATEINFO = _descriptor.Descriptor( + name="RecommendationStateInfo", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.state", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_metadata", + full_name="google.cloud.recommender.v1beta1.RecommendationStateInfo.state_metadata", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY], + enum_types=[_RECOMMENDATIONSTATEINFO_STATE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1519, + serialized_end=1879, +) + +_RECOMMENDATION.fields_by_name[ + "last_refresh_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_RECOMMENDATION.fields_by_name["primary_impact"].message_type = _IMPACT +_RECOMMENDATION.fields_by_name["additional_impact"].message_type = _IMPACT +_RECOMMENDATION.fields_by_name["content"].message_type = _RECOMMENDATIONCONTENT +_RECOMMENDATION.fields_by_name["state_info"].message_type = _RECOMMENDATIONSTATEINFO +_RECOMMENDATIONCONTENT.fields_by_name["operation_groups"].message_type = _OPERATIONGROUP +_OPERATIONGROUP.fields_by_name["operations"].message_type = _OPERATION +_OPERATION_PATHFILTERSENTRY.fields_by_name[ + "value" +].message_type = google_dot_protobuf_dot_struct__pb2._VALUE +_OPERATION_PATHFILTERSENTRY.containing_type = _OPERATION +_OPERATION.fields_by_name[ + "value" +].message_type = google_dot_protobuf_dot_struct__pb2._VALUE +_OPERATION.fields_by_name["path_filters"].message_type = _OPERATION_PATHFILTERSENTRY +_COSTPROJECTION.fields_by_name[ + "cost" +].message_type = google_dot_type_dot_money__pb2._MONEY +_COSTPROJECTION.fields_by_name[ + "duration" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_IMPACT.fields_by_name["category"].enum_type = _IMPACT_CATEGORY +_IMPACT.fields_by_name["cost_projection"].message_type = _COSTPROJECTION +_IMPACT_CATEGORY.containing_type = _IMPACT +_IMPACT.oneofs_by_name["projection"].fields.append( + _IMPACT.fields_by_name["cost_projection"] +) +_IMPACT.fields_by_name["cost_projection"].containing_oneof = _IMPACT.oneofs_by_name[ + 
"projection" +] +_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY.containing_type = _RECOMMENDATIONSTATEINFO +_RECOMMENDATIONSTATEINFO.fields_by_name[ + "state" +].enum_type = _RECOMMENDATIONSTATEINFO_STATE +_RECOMMENDATIONSTATEINFO.fields_by_name[ + "state_metadata" +].message_type = _RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY +_RECOMMENDATIONSTATEINFO_STATE.containing_type = _RECOMMENDATIONSTATEINFO +DESCRIPTOR.message_types_by_name["Recommendation"] = _RECOMMENDATION +DESCRIPTOR.message_types_by_name["RecommendationContent"] = _RECOMMENDATIONCONTENT +DESCRIPTOR.message_types_by_name["OperationGroup"] = _OPERATIONGROUP +DESCRIPTOR.message_types_by_name["Operation"] = _OPERATION +DESCRIPTOR.message_types_by_name["CostProjection"] = _COSTPROJECTION +DESCRIPTOR.message_types_by_name["Impact"] = _IMPACT +DESCRIPTOR.message_types_by_name["RecommendationStateInfo"] = _RECOMMENDATIONSTATEINFO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Recommendation = _reflection.GeneratedProtocolMessageType( + "Recommendation", + (_message.Message,), + dict( + DESCRIPTOR=_RECOMMENDATION, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""A recommendation along with a suggested action. E.g., a rightsizing + recommendation for an underutilized VM, IAM role recommendations, etc + + + Attributes: + name: + Name of recommendation. A project recommendation is + represented as projects/[PROJECT\_NUMBER]/locations/[LOCATION] + /recommenders/[RECOMMENDER\_ID]/recommendations/[RECOMMENDATIO + N\_ID] + description: + Free-form human readable summary in English. The maximum + length is 500 characters. + recommender_subtype: + Contains an identifier for a subtype of recommendations + produced for the same recommender. Subtype is a function of + content and impact, meaning a new subtype will be added when + either content or primary impact category changes. 
Examples: + For recommender = "google.iam.policy.RoleRecommender", + recommender\_subtype can be one of + "REMOVE\_ROLE"/"REPLACE\_ROLE" + last_refresh_time: + Last time this recommendation was refreshed by the system that + created it in the first place. + primary_impact: + The primary impact that this recommendation can have while + trying to optimize for one category. + additional_impact: + Optional set of additional impact that this recommendation may + have when trying to optimize for the primary category. These + may be positive or negative. + content: + Content of the recommendation describing recommended changes + to resources. + state_info: + Information for state. Contains state and metadata. + etag: + Fingerprint of the Recommendation. Provides optimistic locking + when updating states. + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.Recommendation) + ), +) +_sym_db.RegisterMessage(Recommendation) + +RecommendationContent = _reflection.GeneratedProtocolMessageType( + "RecommendationContent", + (_message.Message,), + dict( + DESCRIPTOR=_RECOMMENDATIONCONTENT, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Contains what resources are changing and how they are changing. + + + Attributes: + operation_groups: + Operations to one or more Google Cloud resources grouped in + such a way that, all operations within one group are expected + to be performed atomically and in an order. + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.RecommendationContent) + ), +) +_sym_db.RegisterMessage(RecommendationContent) + +OperationGroup = _reflection.GeneratedProtocolMessageType( + "OperationGroup", + (_message.Message,), + dict( + DESCRIPTOR=_OPERATIONGROUP, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Group of operations that need to be performed atomically. 
+ + + Attributes: + operations: + List of operations across one or more resources that belong to + this group. Loosely based on RFC6902 and should be performed + in the order they appear. + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.OperationGroup) + ), +) +_sym_db.RegisterMessage(OperationGroup) + +Operation = _reflection.GeneratedProtocolMessageType( + "Operation", + (_message.Message,), + dict( + PathFiltersEntry=_reflection.GeneratedProtocolMessageType( + "PathFiltersEntry", + (_message.Message,), + dict( + DESCRIPTOR=_OPERATION_PATHFILTERSENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.Operation.PathFiltersEntry) + ), + ), + DESCRIPTOR=_OPERATION, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Contains an operation for a resource inspired by the JSON-PATCH format + with support for: \* Custom filters for describing partial array patch. + \* Extended path values for describing nested arrays. \* Custom fields + for describing the resource for which the operation is being described. + \* Allows extension to custom operations not natively supported by + RFC6902. See https://tools.ietf.org/html/rfc6902 for details on the + original RFC. + + + Attributes: + action: + Type of this operation. Contains one of 'and', 'remove', + 'replace', 'move', 'copy', 'test' and custom operations. This + field is case-insensitive and always populated. + resource_type: + Type of GCP resource being modified/tested. This field is + always populated. Example: + cloudresourcemanager.googleapis.com/Project, + compute.googleapis.com/Instance + resource: + Contains the fully qualified resource name. This field is + always populated. ex: + //cloudresourcemanager.googleapis.com/projects/foo. + path: + Path to the target field being operated on. If the operation + is at the resource level, then path should be "/". 
This field + is always populated. + source_resource: + Can be set with action 'copy' to copy resource configuration + across different resources of the same type. Example: A + resource clone can be done via action = 'copy', path = "/", + from = "/", source\_resource = and resource\_name = . This + field is empty for all other values of ``action``. + source_path: + Can be set with action 'copy' or 'move' to indicate the source + field within resource or source\_resource, ignored if provided + for other operation types. + value: + Value for the ``path`` field. Set if action is + 'add'/'replace'/'test'. + path_filters: + Set of filters to apply if ``path`` refers to array elements + or nested array elements in order to narrow down to a single + unique element that is being tested/modified. Note that this + is intended to be an exact match per filter. Example: { + "/versions/*/name" : "it-123" + "/versions/*/targetSize/percent": 20 } Example: { + "/bindings/*/role": "roles/admin" "/bindings/*/condition" : + null } Example: { "/bindings/*/role": "roles/admin" + "/bindings/*/members/\*" : ["x@google.com", "y@google.com"] } + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.Operation) + ), +) +_sym_db.RegisterMessage(Operation) +_sym_db.RegisterMessage(Operation.PathFiltersEntry) + +CostProjection = _reflection.GeneratedProtocolMessageType( + "CostProjection", + (_message.Message,), + dict( + DESCRIPTOR=_COSTPROJECTION, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Contains metadata about how much money a recommendation can save or + incur. + + + Attributes: + cost: + An approximate projection on amount saved or amount incurred. + Negative cost units indicate cost savings and positive cost + units indicate increase. See google.type.Money documentation + for positive/negative units. + duration: + Duration for which this cost applies. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.CostProjection) + ), +) +_sym_db.RegisterMessage(CostProjection) + +Impact = _reflection.GeneratedProtocolMessageType( + "Impact", + (_message.Message,), + dict( + DESCRIPTOR=_IMPACT, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Contains the impact a recommendation can have for a given category. + + + Attributes: + category: + Category that is being targeted. + projection: + Contains projections (if any) for this category. + cost_projection: + Use with CategoryType.COST + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.Impact) + ), +) +_sym_db.RegisterMessage(Impact) + +RecommendationStateInfo = _reflection.GeneratedProtocolMessageType( + "RecommendationStateInfo", + (_message.Message,), + dict( + StateMetadataEntry=_reflection.GeneratedProtocolMessageType( + "StateMetadataEntry", + (_message.Message,), + dict( + DESCRIPTOR=_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.RecommendationStateInfo.StateMetadataEntry) + ), + ), + DESCRIPTOR=_RECOMMENDATIONSTATEINFO, + __module__="google.cloud.recommender_v1beta1.proto.recommendation_pb2", + __doc__="""Information for state. Contains state and metadata. + + + Attributes: + state: + The state of the recommendation, Eg ACTIVE, SUCCEEDED, FAILED. + state_metadata: + A map of metadata for the state, provided by user or + automations systems. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.RecommendationStateInfo) + ), +) +_sym_db.RegisterMessage(RecommendationStateInfo) +_sym_db.RegisterMessage(RecommendationStateInfo.StateMetadataEntry) + + +DESCRIPTOR._options = None +_OPERATION_PATHFILTERSENTRY._options = None +_RECOMMENDATIONSTATEINFO_STATEMETADATAENTRY._options = None +# @@protoc_insertion_point(module_scope) diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2_grpc.py b/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2_grpc.py new file mode 100644 index 000000000000..07cb78fe03a9 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommendation_pb2_grpc.py @@ -0,0 +1,2 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommender_service.proto b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service.proto new file mode 100644 index 000000000000..49a076ae74f6 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service.proto @@ -0,0 +1,190 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +syntax = "proto3"; + +package google.cloud.recommender.v1beta1; + +import "google/api/annotations.proto"; +import "google/cloud/recommender/v1beta1/recommendation.proto"; +import "google/longrunning/operations.proto"; +import "google/api/client.proto"; + +option csharp_namespace = "Google.Cloud.Recommmender.V1Beta1"; +option go_package = "google.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender"; +option java_multiple_files = true; +option java_outer_classname = "RecommenderProto"; +option java_package = "com.google.cloud.recommender.v1beta1"; +option objc_class_prefix = "CREC"; + +// Provides recommendations for cloud customers for various categories like +// performance optimization, cost savings, reliability, feature discovery, etc. +// These recommendations are generated automatically based on analysis of user +// resources, configuration and monitoring metrics. +service Recommender { + option (google.api.default_host) = "recommender.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Lists recommendations for a Cloud project. Requires the recommender.*.list + // IAM permission for the specified recommender. + rpc ListRecommendations(ListRecommendationsRequest) returns (ListRecommendationsResponse) { + option (google.api.http) = { + get: "/v1beta1/{parent=projects/*/locations/*/recommenders/*}/recommendations" + }; + } + + // Gets the requested recommendation. Requires the recommender.*.get + // IAM permission for the specified recommender. + rpc GetRecommendation(GetRecommendationRequest) returns (Recommendation) { + option (google.api.http) = { + get: "/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}" + }; + } + + // Mark the Recommendation State as Claimed. Users can use this method to + // indicate to the Recommender API that they are starting to apply the + // recommendation themselves. This stops the recommendation content from being + // updated. 
+ // + // MarkRecommendationClaimed can be applied to recommendations in CLAIMED, + // SUCCEEDED, FAILED, or ACTIVE state. + // + // Requires the recommender.*.update IAM permission for the specified + // recommender. + rpc MarkRecommendationClaimed(MarkRecommendationClaimedRequest) returns (Recommendation) { + option (google.api.http) = { + post: "/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markClaimed" + body: "*" + }; + } + + // Mark the Recommendation State as Succeeded. Users can use this method to + // indicate to the Recommender API that they have applied the recommendation + // themselves, and the operation was successful. This stops the recommendation + // content from being updated. + // + // MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, + // CLAIMED, SUCCEEDED, or FAILED state. + // + // Requires the recommender.*.update IAM permission for the specified + // recommender. + rpc MarkRecommendationSucceeded(MarkRecommendationSucceededRequest) returns (Recommendation) { + option (google.api.http) = { + post: "/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markSucceeded" + body: "*" + }; + } + + // Mark the Recommendation State as Failed. Users can use this method to + // indicate to the Recommender API that they have applied the recommendation + // themselves, and the operation failed. This stops the recommendation content + // from being updated. + // + // MarkRecommendationFailed can be applied to recommendations in ACTIVE, + // CLAIMED, SUCCEEDED, or FAILED state. + // + // Requires the recommender.*.update IAM permission for the specified + // recommender. + rpc MarkRecommendationFailed(MarkRecommendationFailedRequest) returns (Recommendation) { + option (google.api.http) = { + post: "/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markFailed" + body: "*" + }; + } +} + +// Request for the `ListRecommendations` method. 
+message ListRecommendationsRequest { + // Required. The container resource on which to execute the request. + // Acceptable formats: + // + // 1. + // "projects/[PROJECT_NUMBER]/locations/[LOCATION]/recommenders/[RECOMMENDER_ID]", + // + // LOCATION here refers to GCP Locations: + // https://cloud.google.com/about/locations/ + string parent = 1; + + // Optional. The maximum number of results to return from this request. + // Non-positive values are ignored. If not specified, the server will + // determine the number of results to return. + int32 page_size = 2; + + // Optional. If present, retrieves the next batch of results from the + // preceding call to this method. `page_token` must be the value of + // `next_page_token` from the previous response. The values of other method + // parameters must be identical to those in the previous call. + string page_token = 3; + + // Filter expression to restrict the recommendations returned. Supported + // filter fields: state_info.state + // Eg: `state_info.state:"DISMISSED" or state_info.state:"FAILED" + string filter = 5; +} + +// Response to the `ListRecommendations` method. +message ListRecommendationsResponse { + // The set of recommendations for the `parent` resource. + repeated Recommendation recommendations = 1; + + // A token that can be used to request the next page of results. This field is + // empty if there are no additional results. + string next_page_token = 2; +} + +// Request to the `GetRecommendation` method. +message GetRecommendationRequest { + // Name of the recommendation. + string name = 1; +} + +// Request for the `MarkRecommendationClaimed` Method. +message MarkRecommendationClaimedRequest { + // Name of the recommendation. + string name = 1; + + // State properties to include with this state. Overwrites any existing + // `state_metadata`. + map state_metadata = 2; + + // Fingerprint of the Recommendation. Provides optimistic locking. 
+ string etag = 3; +} + +// Request for the `MarkRecommendationSucceeded` Method. +message MarkRecommendationSucceededRequest { + // Name of the recommendation. + string name = 1; + + // State properties to include with this state. Overwrites any existing + // `state_metadata`. + map state_metadata = 2; + + // Fingerprint of the Recommendation. Provides optimistic locking. + string etag = 3; +} + +// Request for the `MarkRecommendationFailed` Method. +message MarkRecommendationFailedRequest { + // Name of the recommendation. + string name = 1; + + // State properties to include with this state. Overwrites any existing + // `state_metadata`. + map state_metadata = 2; + + // Fingerprint of the Recommendation. Provides optimistic locking. + string etag = 3; +} diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2.py b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2.py new file mode 100644 index 000000000000..5e7d8f029fd3 --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2.py @@ -0,0 +1,924 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/recommender_v1beta1/proto/recommender_service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.recommender_v1beta1.proto import ( + recommendation_pb2 as google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.api import client_pb2 as google_dot_api_dot_client__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/recommender_v1beta1/proto/recommender_service.proto", + package="google.cloud.recommender.v1beta1", + syntax="proto3", + serialized_options=_b( + "\n$com.google.cloud.recommender.v1beta1B\020RecommenderProtoP\001ZKgoogle.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender\242\002\004CREC\252\002!Google.Cloud.Recommmender.V1Beta1" + ), + serialized_pb=_b( + '\n@google/cloud/recommender_v1beta1/proto/recommender_service.proto\x12 google.cloud.recommender.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a;google/cloud/recommender_v1beta1/proto/recommendation.proto\x1a#google/longrunning/operations.proto\x1a\x17google/api/client.proto"c\n\x1aListRecommendationsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t"\x81\x01\n\x1bListRecommendationsResponse\x12I\n\x0frecommendations\x18\x01 \x03(\x0b\x32\x30.google.cloud.recommender.v1beta1.Recommendation\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t"(\n\x18GetRecommendationRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\xe3\x01\n MarkRecommendationClaimedRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12m\n\x0estate_metadata\x18\x02 \x03(\x0b\x32U.google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x1a\x34\n\x12StateMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe7\x01\n"MarkRecommendationSucceededRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12o\n\x0estate_metadata\x18\x02 \x03(\x0b\x32W.google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x1a\x34\n\x12StateMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xe1\x01\n\x1fMarkRecommendationFailedRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12l\n\x0estate_metadata\x18\x02 \x03(\x0b\x32T.google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry\x12\x0c\n\x04\x65tag\x18\x03 \x01(\t\x1a\x34\n\x12StateMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x32\xf7\t\n\x0bRecommender\x12\xe3\x01\n\x13ListRecommendations\x12<.google.cloud.recommender.v1beta1.ListRecommendationsRequest\x1a=.google.cloud.recommender.v1beta1.ListRecommendationsResponse"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{parent=projects/*/locations/*/recommenders/*}/recommendations\x12\xd2\x01\n\x11GetRecommendation\x12:.google.cloud.recommender.v1beta1.GetRecommendationRequest\x1a\x30.google.cloud.recommender.v1beta1.Recommendation"O\x82\xd3\xe4\x93\x02I\x12G/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}\x12\xf1\x01\n\x19MarkRecommendationClaimed\x12\x42.google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest\x1a\x30.google.cloud.recommender.v1beta1.Recommendation"^\x82\xd3\xe4\x93\x02X"S/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markClaimed:\x01*\x12\xf7\x01\n\x1bMarkRecommendationSucceeded\x12\x44.google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest\x1a\x30.google.cloud.recommender.v1beta1.Recommendation"`\x82\xd3\xe4\x93\x02Z"U/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markSucceeded:\x01*\x12\xee\x01\n\x18MarkRecommendationFailed\x12\x41.google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest\x1a\x30.google.cloud.recommender.v1beta1.Recommendation"]\x82\xd3\xe4\x93\x02W"R/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markFailed:\x01*\x1aN\xca\x41\x1arecommender.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xb2\x01\n$com.google.cloud.recommender.v1beta1B\x10RecommenderProtoP\x01ZKgoogle.golang.org/genproto/googleapis/cloud/recommender/v1beta1;recommender\xa2\x02\x04\x43REC\xaa\x02!Google.Cloud.Recommmender.V1Beta1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + 
google_dot_api_dot_client__pb2.DESCRIPTOR, + ], +) + + +_LISTRECOMMENDATIONSREQUEST = _descriptor.Descriptor( + name="ListRecommendationsRequest", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsRequest.filter", + index=3, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + 
enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=255, + serialized_end=354, +) + + +_LISTRECOMMENDATIONSRESPONSE = _descriptor.Descriptor( + name="ListRecommendationsResponse", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="recommendations", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsResponse.recommendations", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.cloud.recommender.v1beta1.ListRecommendationsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=357, + serialized_end=486, +) + + +_GETRECOMMENDATIONREQUEST = _descriptor.Descriptor( + name="GetRecommendationRequest", + full_name="google.cloud.recommender.v1beta1.GetRecommendationRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.recommender.v1beta1.GetRecommendationRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, 
+ containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=488, + serialized_end=528, +) + + +_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY = _descriptor.Descriptor( + name="StateMetadataEntry", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=706, + serialized_end=758, +) + +_MARKRECOMMENDATIONCLAIMEDREQUEST = _descriptor.Descriptor( + name="MarkRecommendationClaimedRequest", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name="name", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_metadata", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.state_metadata", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="etag", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.etag", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=531, + serialized_end=758, +) + + +_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY = _descriptor.Descriptor( + name="StateMetadataEntry", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + 
has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=706, + serialized_end=758, +) + +_MARKRECOMMENDATIONSUCCEEDEDREQUEST = _descriptor.Descriptor( + name="MarkRecommendationSucceededRequest", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_metadata", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.state_metadata", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="etag", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.etag", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=761, + serialized_end=992, +) + + +_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY = _descriptor.Descriptor( + name="StateMetadataEntry", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + 
oneofs=[], + serialized_start=706, + serialized_end=758, +) + +_MARKRECOMMENDATIONFAILEDREQUEST = _descriptor.Descriptor( + name="MarkRecommendationFailedRequest", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state_metadata", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.state_metadata", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="etag", + full_name="google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.etag", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=995, + serialized_end=1220, +) + +_LISTRECOMMENDATIONSRESPONSE.fields_by_name[ + "recommendations" +].message_type = ( + 
google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION +) +_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY.containing_type = ( + _MARKRECOMMENDATIONCLAIMEDREQUEST +) +_MARKRECOMMENDATIONCLAIMEDREQUEST.fields_by_name[ + "state_metadata" +].message_type = _MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY +_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY.containing_type = ( + _MARKRECOMMENDATIONSUCCEEDEDREQUEST +) +_MARKRECOMMENDATIONSUCCEEDEDREQUEST.fields_by_name[ + "state_metadata" +].message_type = _MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY +_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY.containing_type = ( + _MARKRECOMMENDATIONFAILEDREQUEST +) +_MARKRECOMMENDATIONFAILEDREQUEST.fields_by_name[ + "state_metadata" +].message_type = _MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY +DESCRIPTOR.message_types_by_name[ + "ListRecommendationsRequest" +] = _LISTRECOMMENDATIONSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListRecommendationsResponse" +] = _LISTRECOMMENDATIONSRESPONSE +DESCRIPTOR.message_types_by_name["GetRecommendationRequest"] = _GETRECOMMENDATIONREQUEST +DESCRIPTOR.message_types_by_name[ + "MarkRecommendationClaimedRequest" +] = _MARKRECOMMENDATIONCLAIMEDREQUEST +DESCRIPTOR.message_types_by_name[ + "MarkRecommendationSucceededRequest" +] = _MARKRECOMMENDATIONSUCCEEDEDREQUEST +DESCRIPTOR.message_types_by_name[ + "MarkRecommendationFailedRequest" +] = _MARKRECOMMENDATIONFAILEDREQUEST +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +ListRecommendationsRequest = _reflection.GeneratedProtocolMessageType( + "ListRecommendationsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTRECOMMENDATIONSREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request for the ``ListRecommendations`` method. + + + Attributes: + parent: + Required. The container resource on which to execute the + request. Acceptable formats: 1. 
"projects/[PROJECT\_NUMBER]/l + ocations/[LOCATION]/recommenders/[RECOMMENDER\_ID]", LOCATION + here refers to GCP Locations: + https://cloud.google.com/about/locations/ + page_size: + Optional. The maximum number of results to return from this + request. Non-positive values are ignored. If not specified, + the server will determine the number of results to return. + page_token: + Optional. If present, retrieves the next batch of results from + the preceding call to this method. ``page_token`` must be the + value of ``next_page_token`` from the previous response. The + values of other method parameters must be identical to those + in the previous call. + filter: + Filter expression to restrict the recommendations returned. + Supported filter fields: state\_info.state Eg: + \`state\_info.state:"DISMISSED" or state\_info.state:"FAILED" + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.ListRecommendationsRequest) + ), +) +_sym_db.RegisterMessage(ListRecommendationsRequest) + +ListRecommendationsResponse = _reflection.GeneratedProtocolMessageType( + "ListRecommendationsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTRECOMMENDATIONSRESPONSE, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Response to the ``ListRecommendations`` method. + + + Attributes: + recommendations: + The set of recommendations for the ``parent`` resource. + next_page_token: + A token that can be used to request the next page of results. + This field is empty if there are no additional results. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.ListRecommendationsResponse) + ), +) +_sym_db.RegisterMessage(ListRecommendationsResponse) + +GetRecommendationRequest = _reflection.GeneratedProtocolMessageType( + "GetRecommendationRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETRECOMMENDATIONREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request to the ``GetRecommendation`` method. + + + Attributes: + name: + Name of the recommendation. + """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.GetRecommendationRequest) + ), +) +_sym_db.RegisterMessage(GetRecommendationRequest) + +MarkRecommendationClaimedRequest = _reflection.GeneratedProtocolMessageType( + "MarkRecommendationClaimedRequest", + (_message.Message,), + dict( + StateMetadataEntry=_reflection.GeneratedProtocolMessageType( + "StateMetadataEntry", + (_message.Message,), + dict( + DESCRIPTOR=_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest.StateMetadataEntry) + ), + ), + DESCRIPTOR=_MARKRECOMMENDATIONCLAIMEDREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request for the ``MarkRecommendationClaimed`` Method. + + + Attributes: + name: + Name of the recommendation. + state_metadata: + State properties to include with this state. Overwrites any + existing ``state_metadata``. + etag: + Fingerprint of the Recommendation. Provides optimistic + locking. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationClaimedRequest) + ), +) +_sym_db.RegisterMessage(MarkRecommendationClaimedRequest) +_sym_db.RegisterMessage(MarkRecommendationClaimedRequest.StateMetadataEntry) + +MarkRecommendationSucceededRequest = _reflection.GeneratedProtocolMessageType( + "MarkRecommendationSucceededRequest", + (_message.Message,), + dict( + StateMetadataEntry=_reflection.GeneratedProtocolMessageType( + "StateMetadataEntry", + (_message.Message,), + dict( + DESCRIPTOR=_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest.StateMetadataEntry) + ), + ), + DESCRIPTOR=_MARKRECOMMENDATIONSUCCEEDEDREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request for the ``MarkRecommendationSucceeded`` Method. + + + Attributes: + name: + Name of the recommendation. + state_metadata: + State properties to include with this state. Overwrites any + existing ``state_metadata``. + etag: + Fingerprint of the Recommendation. Provides optimistic + locking. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationSucceededRequest) + ), +) +_sym_db.RegisterMessage(MarkRecommendationSucceededRequest) +_sym_db.RegisterMessage(MarkRecommendationSucceededRequest.StateMetadataEntry) + +MarkRecommendationFailedRequest = _reflection.GeneratedProtocolMessageType( + "MarkRecommendationFailedRequest", + (_message.Message,), + dict( + StateMetadataEntry=_reflection.GeneratedProtocolMessageType( + "StateMetadataEntry", + (_message.Message,), + dict( + DESCRIPTOR=_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest.StateMetadataEntry) + ), + ), + DESCRIPTOR=_MARKRECOMMENDATIONFAILEDREQUEST, + __module__="google.cloud.recommender_v1beta1.proto.recommender_service_pb2", + __doc__="""Request for the ``MarkRecommendationFailed`` Method. + + + Attributes: + name: + Name of the recommendation. + state_metadata: + State properties to include with this state. Overwrites any + existing ``state_metadata``. + etag: + Fingerprint of the Recommendation. Provides optimistic + locking. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.recommender.v1beta1.MarkRecommendationFailedRequest) + ), +) +_sym_db.RegisterMessage(MarkRecommendationFailedRequest) +_sym_db.RegisterMessage(MarkRecommendationFailedRequest.StateMetadataEntry) + + +DESCRIPTOR._options = None +_MARKRECOMMENDATIONCLAIMEDREQUEST_STATEMETADATAENTRY._options = None +_MARKRECOMMENDATIONSUCCEEDEDREQUEST_STATEMETADATAENTRY._options = None +_MARKRECOMMENDATIONFAILEDREQUEST_STATEMETADATAENTRY._options = None + +_RECOMMENDER = _descriptor.ServiceDescriptor( + name="Recommender", + full_name="google.cloud.recommender.v1beta1.Recommender", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\032recommender.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1223, + serialized_end=2494, + methods=[ + _descriptor.MethodDescriptor( + name="ListRecommendations", + full_name="google.cloud.recommender.v1beta1.Recommender.ListRecommendations", + index=0, + containing_service=None, + input_type=_LISTRECOMMENDATIONSREQUEST, + output_type=_LISTRECOMMENDATIONSRESPONSE, + serialized_options=_b( + "\202\323\344\223\002I\022G/v1beta1/{parent=projects/*/locations/*/recommenders/*}/recommendations" + ), + ), + _descriptor.MethodDescriptor( + name="GetRecommendation", + full_name="google.cloud.recommender.v1beta1.Recommender.GetRecommendation", + index=1, + containing_service=None, + input_type=_GETRECOMMENDATIONREQUEST, + output_type=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION, + serialized_options=_b( + "\202\323\344\223\002I\022G/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}" + ), + ), + _descriptor.MethodDescriptor( + name="MarkRecommendationClaimed", + full_name="google.cloud.recommender.v1beta1.Recommender.MarkRecommendationClaimed", + index=2, + containing_service=None, + input_type=_MARKRECOMMENDATIONCLAIMEDREQUEST, + 
output_type=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION, + serialized_options=_b( + '\202\323\344\223\002X"S/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markClaimed:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="MarkRecommendationSucceeded", + full_name="google.cloud.recommender.v1beta1.Recommender.MarkRecommendationSucceeded", + index=3, + containing_service=None, + input_type=_MARKRECOMMENDATIONSUCCEEDEDREQUEST, + output_type=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION, + serialized_options=_b( + '\202\323\344\223\002Z"U/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markSucceeded:\001*' + ), + ), + _descriptor.MethodDescriptor( + name="MarkRecommendationFailed", + full_name="google.cloud.recommender.v1beta1.Recommender.MarkRecommendationFailed", + index=4, + containing_service=None, + input_type=_MARKRECOMMENDATIONFAILEDREQUEST, + output_type=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2._RECOMMENDATION, + serialized_options=_b( + '\202\323\344\223\002W"R/v1beta1/{name=projects/*/locations/*/recommenders/*/recommendations/*}:markFailed:\001*' + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_RECOMMENDER) + +DESCRIPTOR.services_by_name["Recommender"] = _RECOMMENDER + +# @@protoc_insertion_point(module_scope) diff --git a/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2_grpc.py b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2_grpc.py new file mode 100644 index 000000000000..24ffe1eb40fb --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/proto/recommender_service_pb2_grpc.py @@ -0,0 +1,155 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+import grpc + +from google.cloud.recommender_v1beta1.proto import ( + recommendation_pb2 as google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2, +) +from google.cloud.recommender_v1beta1.proto import ( + recommender_service_pb2 as google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2, +) + + +class RecommenderStub(object): + """Provides recommendations for cloud customers for various categories like + performance optimization, cost savings, reliability, feature discovery, etc. + These recommendations are generated automatically based on analysis of user + resources, configuration and monitoring metrics. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListRecommendations = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/ListRecommendations", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.ListRecommendationsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.ListRecommendationsResponse.FromString, + ) + self.GetRecommendation = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/GetRecommendation", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.GetRecommendationRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.FromString, + ) + self.MarkRecommendationClaimed = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/MarkRecommendationClaimed", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationClaimedRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.FromString, + ) + 
self.MarkRecommendationSucceeded = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/MarkRecommendationSucceeded", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationSucceededRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.FromString, + ) + self.MarkRecommendationFailed = channel.unary_unary( + "/google.cloud.recommender.v1beta1.Recommender/MarkRecommendationFailed", + request_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationFailedRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.FromString, + ) + + +class RecommenderServicer(object): + """Provides recommendations for cloud customers for various categories like + performance optimization, cost savings, reliability, feature discovery, etc. + These recommendations are generated automatically based on analysis of user + resources, configuration and monitoring metrics. + """ + + def ListRecommendations(self, request, context): + """Lists recommendations for a Cloud project. Requires the recommender.*.list + IAM permission for the specified recommender. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetRecommendation(self, request, context): + """Gets the requested recommendation. Requires the recommender.*.get + IAM permission for the specified recommender. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MarkRecommendationClaimed(self, request, context): + """Mark the Recommendation State as Claimed. 
Users can use this method to + indicate to the Recommender API that they are starting to apply the + recommendation themselves. This stops the recommendation content from being + updated. + + MarkRecommendationClaimed can be applied to recommendations in CLAIMED, + SUCCEEDED, FAILED, or ACTIVE state. + + Requires the recommender.*.update IAM permission for the specified + recommender. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MarkRecommendationSucceeded(self, request, context): + """Mark the Recommendation State as Succeeded. Users can use this method to + indicate to the Recommender API that they have applied the recommendation + themselves, and the operation was successful. This stops the recommendation + content from being updated. + + MarkRecommendationSucceeded can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.*.update IAM permission for the specified + recommender. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def MarkRecommendationFailed(self, request, context): + """Mark the Recommendation State as Failed. Users can use this method to + indicate to the Recommender API that they have applied the recommendation + themselves, and the operation failed. This stops the recommendation content + from being updated. + + MarkRecommendationFailed can be applied to recommendations in ACTIVE, + CLAIMED, SUCCEEDED, or FAILED state. + + Requires the recommender.*.update IAM permission for the specified + recommender. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_RecommenderServicer_to_server(servicer, server): + rpc_method_handlers = { + "ListRecommendations": grpc.unary_unary_rpc_method_handler( + servicer.ListRecommendations, + request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.ListRecommendationsRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.ListRecommendationsResponse.SerializeToString, + ), + "GetRecommendation": grpc.unary_unary_rpc_method_handler( + servicer.GetRecommendation, + request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.GetRecommendationRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.SerializeToString, + ), + "MarkRecommendationClaimed": grpc.unary_unary_rpc_method_handler( + servicer.MarkRecommendationClaimed, + request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationClaimedRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.SerializeToString, + ), + "MarkRecommendationSucceeded": grpc.unary_unary_rpc_method_handler( + servicer.MarkRecommendationSucceeded, + request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationSucceededRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.SerializeToString, + ), + "MarkRecommendationFailed": grpc.unary_unary_rpc_method_handler( + servicer.MarkRecommendationFailed, + 
request_deserializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommender__service__pb2.MarkRecommendationFailedRequest.FromString, + response_serializer=google_dot_cloud_dot_recommender__v1beta1_dot_proto_dot_recommendation__pb2.Recommendation.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.cloud.recommender.v1beta1.Recommender", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/recommender/google/cloud/recommender_v1beta1/types.py b/recommender/google/cloud/recommender_v1beta1/types.py new file mode 100644 index 000000000000..205bae43ea9c --- /dev/null +++ b/recommender/google/cloud/recommender_v1beta1/types.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.cloud.recommender_v1beta1.proto import recommendation_pb2 +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import struct_pb2 +from google.protobuf import timestamp_pb2 +from google.type import money_pb2 + + +_shared_modules = [duration_pb2, struct_pb2, timestamp_pb2, money_pb2] + +_local_modules = [recommendation_pb2, recommender_service_pb2] + +names = [] + +for module in _shared_modules: # pragma: NO COVER + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) +for module in _local_modules: + for name, message in get_messages(module).items(): + message.__module__ = "google.cloud.recommender_v1beta1.types" + setattr(sys.modules[__name__], name, message) + names.append(name) + + +__all__ = tuple(sorted(names)) diff --git a/recommender/mypy.ini b/recommender/mypy.ini new file mode 100644 index 000000000000..f23e6b533aad --- /dev/null +++ b/recommender/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.5 +namespace_packages = True diff --git a/recommender/noxfile.py b/recommender/noxfile.py new file mode 100644 index 000000000000..1f6797a2207f --- /dev/null +++ b/recommender/noxfile.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import +import os +import shutil + +import nox + + +LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +BLACK_VERSION = "black==19.3b0" +BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +if os.path.exists("samples"): + BLACK_PATHS.append("samples") + + +@nox.session(python="3.7") +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) + session.run("black", "--check", *BLACK_PATHS) + session.run("flake8", "google", "tests") + + +@nox.session(python="3.6") +def blacken(session): + """Run black. + + Format code to uniform standard. + + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. + That run uses an image that doesn't have 3.6 installed. Before updating this + check the state of the `gcp_ubuntu_config` we use for that Kokoro run. + """ + session.install(BLACK_VERSION) + session.run("black", *BLACK_PATHS) + + +@nox.session(python="3.7") +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def default(session): + # Install all test dependencies, then install this package in-place. + session.install("mock", "pytest", "pytest-cov") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", ".") + + # Run py.test against the unit tests. 
+ session.run( + "py.test", + "--quiet", + "--cov=google.cloud", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + + +@nox.session(python=["3.5", "3.6", "3.7"]) +def unit(session): + """Run the unit test suite.""" + default(session) + + +@nox.session(python=["3.7"]) +def system(session): + """Run the system test suite.""" + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install("mock", "pytest") + for local_dep in LOCAL_DEPS: + session.install("-e", local_dep) + session.install("-e", "../test_utils/") + session.install("-e", ".") + + # Run py.test against the system tests. + if system_test_exists: + session.run("py.test", "--quiet", system_test_path, *session.posargs) + if system_test_folder_exists: + session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + + +@nox.session(python="3.7") +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python="3.7") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/recommender/setup.cfg b/recommender/setup.cfg new file mode 100644 index 000000000000..3bd555500e37 --- /dev/null +++ b/recommender/setup.cfg @@ -0,0 +1,3 @@ +# Generated by synthtool. DO NOT EDIT! +[bdist_wheel] +universal = 1 diff --git a/recommender/setup.py b/recommender/setup.py new file mode 100644 index 000000000000..31a46fd840eb --- /dev/null +++ b/recommender/setup.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import io +import os + +import setuptools + +name = "google-cloud-recommender" +description = "Cloud Recommender API client library" +version = "0.1.0" +release_status = "Development Status :: 3 - Alpha" +dependencies = ["google-api-core[grpc] >= 1.14.0, < 2.0.0dev"] + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package for package in setuptools.find_packages() if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/GoogleCloudPlatform/google-cloud-python", + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + namespace_packages=namespaces, + install_requires=dependencies, + python_requires=">=3.5", + include_package_data=True, + zip_safe=False, +) diff --git a/recommender/synth.metadata b/recommender/synth.metadata new file mode 100644 index 000000000000..41fd2c0857c7 --- /dev/null +++ b/recommender/synth.metadata @@ -0,0 +1,39 @@ +{ + "updateTime": "2019-10-05T12:31:38.542410Z", + "sources": [ + { + "generator": { + "name": "artman", + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" + } + }, + { 
+ "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "ceb8e2fb12f048cc94caae532ef0b4cf026a78f3", + "internalRef": "272971705" + } + }, + { + "template": { + "name": "python_library", + "origin": "synthtool.gcp", + "version": "2019.5.2" + } + } + ], + "destinations": [ + { + "client": { + "source": "googleapis", + "apiName": "recommender", + "apiVersion": "v1beta1", + "language": "python", + "generator": "gapic", + "config": "google/cloud/recommender/artman_recommender_v1beta1.yaml" + } + } + ] +} \ No newline at end of file diff --git a/recommender/synth.py b/recommender/synth.py new file mode 100644 index 000000000000..93d445f42f11 --- /dev/null +++ b/recommender/synth.py @@ -0,0 +1,42 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This script is used to synthesize generated parts of this library.""" +import re + +import synthtool as s +from synthtool import gcp + +gapic = gcp.GAPICGenerator() +versions = ["v1beta1"] +common = gcp.CommonTemplates() + + +# ---------------------------------------------------------------------------- +# Generate Cloud Recommender +# ---------------------------------------------------------------------------- +for version in versions: + library = gapic.py_library( + "recommender", version, + include_protos=True + ) + s.move(library, excludes=['nox.py', 'docs/index.rst', 'README.rst', 'setup.py']) + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = common.py_library(unit_cov_level=97, cov_level=100) +s.move(templated_files, excludes=['noxfile.py']) + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file diff --git a/recommender/tests/unit/gapic/v1beta1/test_recommender_client_v1beta1.py b/recommender/tests/unit/gapic/v1beta1/test_recommender_client_v1beta1.py new file mode 100644 index 000000000000..dafe315484bf --- /dev/null +++ b/recommender/tests/unit/gapic/v1beta1/test_recommender_client_v1beta1.py @@ -0,0 +1,324 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Unit tests.""" + +import mock +import pytest + +from google.cloud import recommender_v1beta1 +from google.cloud.recommender_v1beta1.proto import recommendation_pb2 +from google.cloud.recommender_v1beta1.proto import recommender_service_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestRecommenderClient(object): + def test_list_recommendations(self): + # Setup Expected Response + next_page_token = "" + recommendations_element = {} + recommendations = [recommendations_element] + expected_response = { + "next_page_token": next_page_token, + "recommendations": recommendations, + } + expected_response = recommender_service_pb2.ListRecommendationsResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + parent = client.recommender_path("[PROJECT]", "[LOCATION]", "[RECOMMENDER]") + + paged_list_response = client.list_recommendations(parent) + resources = 
list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.recommendations[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.ListRecommendationsRequest( + parent=parent + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_recommendations_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup request + parent = client.recommender_path("[PROJECT]", "[LOCATION]", "[RECOMMENDER]") + + paged_list_response = client.list_recommendations(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_recommendation(self): + # Setup Expected Response + name_2 = "name2-1052831874" + description = "description-1724546052" + recommender_subtype = "recommenderSubtype-1488504412" + etag = "etag3123477" + expected_response = { + "name": name_2, + "description": description, + "recommender_subtype": recommender_subtype, + "etag": etag, + } + expected_response = recommendation_pb2.Recommendation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + + response = client.get_recommendation(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.GetRecommendationRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def 
test_get_recommendation_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + + with pytest.raises(CustomException): + client.get_recommendation(name) + + def test_mark_recommendation_claimed(self): + # Setup Expected Response + name_2 = "name2-1052831874" + description = "description-1724546052" + recommender_subtype = "recommenderSubtype-1488504412" + etag_2 = "etag2-1293302904" + expected_response = { + "name": name_2, + "description": description, + "recommender_subtype": recommender_subtype, + "etag": etag_2, + } + expected_response = recommendation_pb2.Recommendation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + response = client.mark_recommendation_claimed(name, etag) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.MarkRecommendationClaimedRequest( + name=name, etag=etag + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_mark_recommendation_claimed_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + 
client = recommender_v1beta1.RecommenderClient() + + # Setup request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + with pytest.raises(CustomException): + client.mark_recommendation_claimed(name, etag) + + def test_mark_recommendation_succeeded(self): + # Setup Expected Response + name_2 = "name2-1052831874" + description = "description-1724546052" + recommender_subtype = "recommenderSubtype-1488504412" + etag_2 = "etag2-1293302904" + expected_response = { + "name": name_2, + "description": description, + "recommender_subtype": recommender_subtype, + "etag": etag_2, + } + expected_response = recommendation_pb2.Recommendation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + response = client.mark_recommendation_succeeded(name, etag) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.MarkRecommendationSucceededRequest( + name=name, etag=etag + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_mark_recommendation_succeeded_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + with 
pytest.raises(CustomException): + client.mark_recommendation_succeeded(name, etag) + + def test_mark_recommendation_failed(self): + # Setup Expected Response + name_2 = "name2-1052831874" + description = "description-1724546052" + recommender_subtype = "recommenderSubtype-1488504412" + etag_2 = "etag2-1293302904" + expected_response = { + "name": name_2, + "description": description, + "recommender_subtype": recommender_subtype, + "etag": etag_2, + } + expected_response = recommendation_pb2.Recommendation(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup Request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + response = client.mark_recommendation_failed(name, etag) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = recommender_service_pb2.MarkRecommendationFailedRequest( + name=name, etag=etag + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_mark_recommendation_failed_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = recommender_v1beta1.RecommenderClient() + + # Setup request + name = client.recommendation_path( + "[PROJECT]", "[LOCATION]", "[RECOMMENDER]", "[RECOMMENDATION]" + ) + etag = "etag3123477" + + with pytest.raises(CustomException): + client.mark_recommendation_failed(name, etag) diff --git a/redis/google/cloud/redis_v1/gapic/cloud_redis_client.py b/redis/google/cloud/redis_v1/gapic/cloud_redis_client.py index 
4049d4040528..5060fd7c5f77 100644 --- a/redis/google/cloud/redis_v1/gapic/cloud_redis_client.py +++ b/redis/google/cloud/redis_v1/gapic/cloud_redis_client.py @@ -230,69 +230,55 @@ def __init__( self._inner_api_calls = {} # Service calls - def create_instance( + def list_instances( self, parent, - instance_id, - instance, + page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Creates a Redis instance based on the specified tier and memory size. + Lists all Redis instances owned by a project in either the specified + location (region) or all locations. - By default, the instance is accessible from the project's `default - network `__. + The location should have the following format: - The creation is executed asynchronously and callers may check the - returned operation to track its progress. Once the operation is - completed the Redis instance will be fully functional. Completed - longrunning.Operation will contain the new instance object in the - response field. + - ``projects/{project_id}/locations/{location_id}`` - The returned operation is automatically deleted after a few hours, so - there is no need to call DeleteOperation. + If ``location_id`` is specified as ``-`` (wildcard), then all regions + available to the project are queried, and the results are aggregated. Example: >>> from google.cloud import redis_v1 - >>> from google.cloud.redis_v1 import enums >>> >>> client = redis_v1.CloudRedisClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> instance_id = 'test_instance' - >>> tier = enums.Instance.Tier.BASIC - >>> memory_size_gb = 1 - >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb} >>> - >>> response = client.create_instance(parent, instance_id, instance) + >>> # Iterate over all results + >>> for element in client.list_instances(parent): + ... # process element + ... pass >>> - >>> def callback(operation_future): - ... 
# Handle result. - ... result = operation_future.result() >>> - >>> response.add_done_callback(callback) + >>> # Alternatively: >>> - >>> # Handle metadata. - >>> metadata = response.metadata() + >>> # Iterate over results one page at a time + >>> for page in client.list_instances(parent).pages: + ... for element in page: + ... # process element + ... pass Args: parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region. - instance_id (str): Required. The logical name of the Redis instance in the customer project - with the following restrictions: - - - Must contain only lowercase letters, numbers, and hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the customer project / location - instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. A Redis [Instance] resource - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.redis_v1.types.Instance` + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -303,7 +289,10 @@ def create_instance( that is provided to the method. Returns: - A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.redis_v1.types.Instance` instances. + You can also iterate over the pages of the response + using its `pages` property. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -313,18 +302,18 @@ def create_instance( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "create_instance" not in self._inner_api_calls: + if "list_instances" not in self._inner_api_calls: self._inner_api_calls[ - "create_instance" + "list_instances" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_instance, - default_retry=self._method_configs["CreateInstance"].retry, - default_timeout=self._method_configs["CreateInstance"].timeout, + self.transport.list_instances, + default_retry=self._method_configs["ListInstances"].retry, + default_timeout=self._method_configs["ListInstances"].timeout, client_info=self._client_info, ) - request = cloud_redis_pb2.CreateInstanceRequest( - parent=parent, instance_id=instance_id, instance=instance + request = cloud_redis_pb2.ListInstancesRequest( + parent=parent, page_size=page_size ) if metadata is None: metadata = [] @@ -339,71 +328,44 @@ def create_instance( ) metadata.append(routing_metadata) - operation = self._inner_api_calls["create_instance"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - cloud_redis_pb2.Instance, - metadata_type=cloud_redis_pb2.OperationMetadata, + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_instances"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="instances", + request_token_field="page_token", + response_token_field="next_page_token", ) + return iterator - def update_instance( + def get_instance( self, - update_mask, - instance, + name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Updates the metadata and configuration of a 
specific Redis instance. - - Completed longrunning.Operation will contain the new instance object - in the response field. The returned operation is automatically deleted - after a few hours, so there is no need to call DeleteOperation. + Gets the details of a specific Redis instance. Example: >>> from google.cloud import redis_v1 >>> >>> client = redis_v1.CloudRedisClient() >>> - >>> paths_element = 'display_name' - >>> paths_element_2 = 'memory_size_gb' - >>> paths = [paths_element, paths_element_2] - >>> update_mask = {'paths': paths} - >>> display_name = ' instance.memory_size_gb=4' - >>> instance = {'display_name': display_name} - >>> - >>> response = client.update_instance(update_mask, instance) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) + >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> - >>> # Handle metadata. - >>> metadata = response.metadata() + >>> response = client.get_instance(name) Args: - update_mask (Union[dict, ~google.cloud.redis_v1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied - in this field. The elements of the repeated paths field may only include - these fields from ``Instance``: - - - ``displayName`` - - ``labels`` - - ``memorySizeGb`` - - ``redisConfig`` - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.redis_v1.types.FieldMask` - instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. Update description. Only fields specified in update\_mask are - updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.redis_v1.types.Instance` + name (str): Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. 
retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -414,7 +376,7 @@ def update_instance( that is provided to the method. Returns: - A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. + A :class:`~google.cloud.redis_v1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -424,24 +386,22 @@ def update_instance( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "update_instance" not in self._inner_api_calls: + if "get_instance" not in self._inner_api_calls: self._inner_api_calls[ - "update_instance" + "get_instance" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_instance, - default_retry=self._method_configs["UpdateInstance"].retry, - default_timeout=self._method_configs["UpdateInstance"].timeout, + self.transport.get_instance, + default_retry=self._method_configs["GetInstance"].retry, + default_timeout=self._method_configs["GetInstance"].timeout, client_info=self._client_info, ) - request = cloud_redis_pb2.UpdateInstanceRequest( - update_mask=update_mask, instance=instance - ) + request = cloud_redis_pb2.GetInstanceRequest(name=name) if metadata is None: metadata = [] metadata = list(metadata) try: - routing_header = [("instance.name", instance.name)] + routing_header = [("name", name)] except AttributeError: pass else: @@ -450,65 +410,73 @@ def update_instance( ) metadata.append(routing_metadata) - operation = self._inner_api_calls["update_instance"]( + return self._inner_api_calls["get_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - cloud_redis_pb2.Instance, - metadata_type=cloud_redis_pb2.OperationMetadata, - ) - def list_instances( + def create_instance( self, parent, - 
page_size=None, + instance_id, + instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Lists all Redis instances owned by a project in either the specified - location (region) or all locations. + Creates a Redis instance based on the specified tier and memory size. - The location should have the following format: + By default, the instance is accessible from the project's `default + network `__. - - ``projects/{project_id}/locations/{location_id}`` + The creation is executed asynchronously and callers may check the + returned operation to track its progress. Once the operation is + completed the Redis instance will be fully functional. Completed + longrunning.Operation will contain the new instance object in the + response field. - If ``location_id`` is specified as ``-`` (wildcard), then all regions - available to the project are queried, and the results are aggregated. + The returned operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1 + >>> from google.cloud.redis_v1 import enums >>> >>> client = redis_v1.CloudRedisClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> instance_id = 'test_instance' + >>> tier = enums.Instance.Tier.BASIC + >>> memory_size_gb = 1 + >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb} >>> - >>> # Iterate over all results - >>> for element in client.list_instances(parent): - ... # process element - ... pass + >>> response = client.create_instance(parent, instance_id, instance) >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() >>> - >>> # Alternatively: + >>> response.add_done_callback(callback) >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_instances(parent).pages: - ... for element in page: - ... # process element - ... 
pass + >>> # Handle metadata. + >>> metadata = response.metadata() Args: parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. + instance_id (str): Required. The logical name of the Redis instance in the customer project + with the following restrictions: + + - Must contain only lowercase letters, numbers, and hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. A Redis [Instance] resource + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.redis_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -519,10 +487,7 @@ def list_instances( that is provided to the method. Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.redis_v1.types.Instance` instances. - You can also iterate over the pages of the response - using its `pages` property. + A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -532,18 +497,18 @@ def list_instances( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if "list_instances" not in self._inner_api_calls: + if "create_instance" not in self._inner_api_calls: self._inner_api_calls[ - "list_instances" + "create_instance" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_instances, - default_retry=self._method_configs["ListInstances"].retry, - default_timeout=self._method_configs["ListInstances"].timeout, + self.transport.create_instance, + default_retry=self._method_configs["CreateInstance"].retry, + default_timeout=self._method_configs["CreateInstance"].timeout, client_info=self._client_info, ) - request = cloud_redis_pb2.ListInstancesRequest( - parent=parent, page_size=page_size + request = cloud_redis_pb2.CreateInstanceRequest( + parent=parent, instance_id=instance_id, instance=instance ) if metadata is None: metadata = [] @@ -558,44 +523,71 @@ def list_instances( ) metadata.append(routing_metadata) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_instances"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="instances", - request_token_field="page_token", - response_token_field="next_page_token", + operation = self._inner_api_calls["create_instance"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + cloud_redis_pb2.Instance, + metadata_type=cloud_redis_pb2.OperationMetadata, ) - return iterator - def get_instance( + def update_instance( self, - name, + update_mask, + instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Gets the details of a specific Redis instance. + Updates the metadata and configuration of a specific Redis instance. + + Completed longrunning.Operation will contain the new instance object + in the response field. 
The returned operation is automatically deleted + after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1 >>> >>> client = redis_v1.CloudRedisClient() >>> - >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + >>> paths_element = 'display_name' + >>> paths_element_2 = 'memory_size_gb' + >>> paths = [paths_element, paths_element_2] + >>> update_mask = {'paths': paths} + >>> display_name = ' instance.memory_size_gb=4' + >>> instance = {'display_name': display_name} >>> - >>> response = client.get_instance(name) + >>> response = client.update_instance(update_mask, instance) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() Args: - name (str): Required. Redis instance resource name using the form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region. + update_mask (Union[dict, ~google.cloud.redis_v1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied + in this field. The elements of the repeated paths field may only include + these fields from ``Instance``: + + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.redis_v1.types.FieldMask` + instance (Union[dict, ~google.cloud.redis_v1.types.Instance]): Required. Update description. Only fields specified in update\_mask are + updated. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.redis_v1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -606,7 +598,7 @@ def get_instance( that is provided to the method. Returns: - A :class:`~google.cloud.redis_v1.types.Instance` instance. + A :class:`~google.cloud.redis_v1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -616,22 +608,24 @@ def get_instance( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "get_instance" not in self._inner_api_calls: + if "update_instance" not in self._inner_api_calls: self._inner_api_calls[ - "get_instance" + "update_instance" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_instance, - default_retry=self._method_configs["GetInstance"].retry, - default_timeout=self._method_configs["GetInstance"].timeout, + self.transport.update_instance, + default_retry=self._method_configs["UpdateInstance"].retry, + default_timeout=self._method_configs["UpdateInstance"].timeout, client_info=self._client_info, ) - request = cloud_redis_pb2.GetInstanceRequest(name=name) + request = cloud_redis_pb2.UpdateInstanceRequest( + update_mask=update_mask, instance=instance + ) if metadata is None: metadata = [] metadata = list(metadata) try: - routing_header = [("name", name)] + routing_header = [("instance.name", instance.name)] except AttributeError: pass else: @@ -640,9 +634,15 @@ def get_instance( ) metadata.append(routing_metadata) - return self._inner_api_calls["get_instance"]( + operation = self._inner_api_calls["update_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + cloud_redis_pb2.Instance, + metadata_type=cloud_redis_pb2.OperationMetadata, + ) def import_instance( self, diff --git a/redis/google/cloud/redis_v1/gapic/cloud_redis_client_config.py b/redis/google/cloud/redis_v1/gapic/cloud_redis_client_config.py index 114fea730c91..9d0264891f0b 100644 --- 
a/redis/google/cloud/redis_v1/gapic/cloud_redis_client_config.py +++ b/redis/google/cloud/redis_v1/gapic/cloud_redis_client_config.py @@ -17,24 +17,24 @@ } }, "methods": { - "CreateInstance": { + "ListInstances": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, - "UpdateInstance": { + "GetInstance": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, - "ListInstances": { + "CreateInstance": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, - "GetInstance": { + "UpdateInstance": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ImportInstance": { diff --git a/redis/google/cloud/redis_v1/gapic/transports/cloud_redis_grpc_transport.py b/redis/google/cloud/redis_v1/gapic/transports/cloud_redis_grpc_transport.py index 4a5dab2b043c..e10be5fecd6a 100644 --- a/redis/google/cloud/redis_v1/gapic/transports/cloud_redis_grpc_transport.py +++ b/redis/google/cloud/redis_v1/gapic/transports/cloud_redis_grpc_transport.py @@ -114,80 +114,80 @@ def channel(self): return self._channel @property - def create_instance(self): - """Return the gRPC stub for :meth:`CloudRedisClient.create_instance`. + def list_instances(self): + """Return the gRPC stub for :meth:`CloudRedisClient.list_instances`. - Creates a Redis instance based on the specified tier and memory size. + Lists all Redis instances owned by a project in either the specified + location (region) or all locations. - By default, the instance is accessible from the project's `default - network `__. + The location should have the following format: - The creation is executed asynchronously and callers may check the - returned operation to track its progress. Once the operation is - completed the Redis instance will be fully functional. 
Completed - longrunning.Operation will contain the new instance object in the - response field. + - ``projects/{project_id}/locations/{location_id}`` - The returned operation is automatically deleted after a few hours, so - there is no need to call DeleteOperation. + If ``location_id`` is specified as ``-`` (wildcard), then all regions + available to the project are queried, and the results are aggregated. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. """ - return self._stubs["cloud_redis_stub"].CreateInstance + return self._stubs["cloud_redis_stub"].ListInstances @property - def update_instance(self): - """Return the gRPC stub for :meth:`CloudRedisClient.update_instance`. - - Updates the metadata and configuration of a specific Redis instance. + def get_instance(self): + """Return the gRPC stub for :meth:`CloudRedisClient.get_instance`. - Completed longrunning.Operation will contain the new instance object - in the response field. The returned operation is automatically deleted - after a few hours, so there is no need to call DeleteOperation. + Gets the details of a specific Redis instance. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. """ - return self._stubs["cloud_redis_stub"].UpdateInstance + return self._stubs["cloud_redis_stub"].GetInstance @property - def list_instances(self): - """Return the gRPC stub for :meth:`CloudRedisClient.list_instances`. + def create_instance(self): + """Return the gRPC stub for :meth:`CloudRedisClient.create_instance`. - Lists all Redis instances owned by a project in either the specified - location (region) or all locations. + Creates a Redis instance based on the specified tier and memory size. - The location should have the following format: + By default, the instance is accessible from the project's `default + network `__. 
- - ``projects/{project_id}/locations/{location_id}`` + The creation is executed asynchronously and callers may check the + returned operation to track its progress. Once the operation is + completed the Redis instance will be fully functional. Completed + longrunning.Operation will contain the new instance object in the + response field. - If ``location_id`` is specified as ``-`` (wildcard), then all regions - available to the project are queried, and the results are aggregated. + The returned operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. """ - return self._stubs["cloud_redis_stub"].ListInstances + return self._stubs["cloud_redis_stub"].CreateInstance @property - def get_instance(self): - """Return the gRPC stub for :meth:`CloudRedisClient.get_instance`. + def update_instance(self): + """Return the gRPC stub for :meth:`CloudRedisClient.update_instance`. - Gets the details of a specific Redis instance. + Updates the metadata and configuration of a specific Redis instance. + + Completed longrunning.Operation will contain the new instance object + in the response field. The returned operation is automatically deleted + after a few hours, so there is no need to call DeleteOperation. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. 
""" - return self._stubs["cloud_redis_stub"].GetInstance + return self._stubs["cloud_redis_stub"].UpdateInstance @property def import_instance(self): diff --git a/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py index f8d5755634b4..33e55cddd7dd 100644 --- a/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py +++ b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py @@ -231,69 +231,55 @@ def __init__( self._inner_api_calls = {} # Service calls - def create_instance( + def list_instances( self, parent, - instance_id, - instance, + page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Creates a Redis instance based on the specified tier and memory size. + Lists all Redis instances owned by a project in either the specified + location (region) or all locations. - By default, the instance is accessible from the project's `default - network `__. + The location should have the following format: - The creation is executed asynchronously and callers may check the - returned operation to track its progress. Once the operation is - completed the Redis instance will be fully functional. Completed - longrunning.Operation will contain the new instance object in the - response field. + - ``projects/{project_id}/locations/{location_id}`` - The returned operation is automatically deleted after a few hours, so - there is no need to call DeleteOperation. + If ``location_id`` is specified as ``-`` (wildcard), then all regions + available to the project are queried, and the results are aggregated. 
Example: >>> from google.cloud import redis_v1beta1 - >>> from google.cloud.redis_v1beta1 import enums >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') - >>> instance_id = 'test_instance' - >>> tier = enums.Instance.Tier.BASIC - >>> memory_size_gb = 1 - >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb} >>> - >>> response = client.create_instance(parent, instance_id, instance) + >>> # Iterate over all results + >>> for element in client.list_instances(parent): + ... # process element + ... pass >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() >>> - >>> response.add_done_callback(callback) + >>> # Alternatively: >>> - >>> # Handle metadata. - >>> metadata = response.metadata() + >>> # Iterate over results one page at a time + >>> for page in client.list_instances(parent).pages: + ... for element in page: + ... # process element + ... pass Args: parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region. - instance_id (str): Required. The logical name of the Redis instance in the customer project - with the following restrictions: - - - Must contain only lowercase letters, numbers, and hyphens. - - Must start with a letter. - - Must be between 1-40 characters. - - Must end with a number or a letter. - - Must be unique within the customer project / location - instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. A Redis [Instance] resource - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.redis_v1beta1.types.Instance` + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. 
If page + streaming is performed per-page, this determines the maximum number + of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -304,7 +290,10 @@ def create_instance( that is provided to the method. Returns: - A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.redis_v1beta1.types.Instance` instances. + You can also iterate over the pages of the response + using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -314,18 +303,18 @@ def create_instance( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "create_instance" not in self._inner_api_calls: + if "list_instances" not in self._inner_api_calls: self._inner_api_calls[ - "create_instance" + "list_instances" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_instance, - default_retry=self._method_configs["CreateInstance"].retry, - default_timeout=self._method_configs["CreateInstance"].timeout, + self.transport.list_instances, + default_retry=self._method_configs["ListInstances"].retry, + default_timeout=self._method_configs["ListInstances"].timeout, client_info=self._client_info, ) - request = cloud_redis_pb2.CreateInstanceRequest( - parent=parent, instance_id=instance_id, instance=instance + request = cloud_redis_pb2.ListInstancesRequest( + parent=parent, page_size=page_size ) if metadata is None: metadata = [] @@ -340,72 +329,44 @@ def create_instance( ) metadata.append(routing_metadata) - operation = self._inner_api_calls["create_instance"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - 
cloud_redis_pb2.Instance, - metadata_type=any_pb2.Any, + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_instances"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="instances", + request_token_field="page_token", + response_token_field="next_page_token", ) + return iterator - def update_instance( + def get_instance( self, - update_mask, - instance, + name, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Updates the metadata and configuration of a specific Redis instance. - - Completed longrunning.Operation will contain the new instance object - in the response field. The returned operation is automatically deleted - after a few hours, so there is no need to call DeleteOperation. + Gets the details of a specific Redis instance. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> - >>> paths_element = 'display_name' - >>> paths_element_2 = 'memory_size_gb' - >>> paths = [paths_element, paths_element_2] - >>> update_mask = {'paths': paths} - >>> display_name = 'UpdatedDisplayName' - >>> memory_size_gb = 4 - >>> instance = {'display_name': display_name, 'memory_size_gb': memory_size_gb} - >>> - >>> response = client.update_instance(update_mask, instance) - >>> - >>> def callback(operation_future): - ... # Handle result. - ... result = operation_future.result() - >>> - >>> response.add_done_callback(callback) + >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') >>> - >>> # Handle metadata. - >>> metadata = response.metadata() + >>> response = client.get_instance(name) Args: - update_mask (Union[dict, ~google.cloud.redis_v1beta1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied - in this field. 
The elements of the repeated paths field may only include - these fields from ``Instance``: - - - ``displayName`` - - ``labels`` - - ``memorySizeGb`` - - ``redisConfig`` - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.redis_v1beta1.types.FieldMask` - instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. Update description. Only fields specified in update\_mask are - updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.redis_v1beta1.types.Instance` + name (str): Required. Redis instance resource name using the form: + ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + where ``location_id`` refers to a GCP region. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -416,7 +377,7 @@ def update_instance( that is provided to the method. Returns: - A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. + A :class:`~google.cloud.redis_v1beta1.types.Instance` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -426,24 +387,22 @@ def update_instance( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if "update_instance" not in self._inner_api_calls: + if "get_instance" not in self._inner_api_calls: self._inner_api_calls[ - "update_instance" + "get_instance" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_instance, - default_retry=self._method_configs["UpdateInstance"].retry, - default_timeout=self._method_configs["UpdateInstance"].timeout, + self.transport.get_instance, + default_retry=self._method_configs["GetInstance"].retry, + default_timeout=self._method_configs["GetInstance"].timeout, client_info=self._client_info, ) - request = cloud_redis_pb2.UpdateInstanceRequest( - update_mask=update_mask, instance=instance - ) + request = cloud_redis_pb2.GetInstanceRequest(name=name) if metadata is None: metadata = [] metadata = list(metadata) try: - routing_header = [("instance.name", instance.name)] + routing_header = [("name", name)] except AttributeError: pass else: @@ -452,65 +411,73 @@ def update_instance( ) metadata.append(routing_metadata) - operation = self._inner_api_calls["update_instance"]( + return self._inner_api_calls["get_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) - return google.api_core.operation.from_gapic( - operation, - self.transport._operations_client, - cloud_redis_pb2.Instance, - metadata_type=any_pb2.Any, - ) - def list_instances( + def create_instance( self, parent, - page_size=None, + instance_id, + instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Lists all Redis instances owned by a project in either the specified - location (region) or all locations. + Creates a Redis instance based on the specified tier and memory size. - The location should have the following format: + By default, the instance is accessible from the project's `default + network `__. 
- - ``projects/{project_id}/locations/{location_id}`` + The creation is executed asynchronously and callers may check the + returned operation to track its progress. Once the operation is + completed the Redis instance will be fully functional. Completed + longrunning.Operation will contain the new instance object in the + response field. - If ``location_id`` is specified as ``-`` (wildcard), then all regions - available to the project are queried, and the results are aggregated. + The returned operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1beta1 + >>> from google.cloud.redis_v1beta1 import enums >>> >>> client = redis_v1beta1.CloudRedisClient() >>> >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> instance_id = 'test_instance' + >>> tier = enums.Instance.Tier.BASIC + >>> memory_size_gb = 1 + >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb} >>> - >>> # Iterate over all results - >>> for element in client.list_instances(parent): - ... # process element - ... pass + >>> response = client.create_instance(parent, instance_id, instance) >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() >>> - >>> # Alternatively: + >>> response.add_done_callback(callback) >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_instances(parent).pages: - ... for element in page: - ... # process element - ... pass + >>> # Handle metadata. + >>> metadata = response.metadata() Args: parent (str): Required. The resource name of the instance location using the form: ``projects/{project_id}/locations/{location_id}`` where ``location_id`` refers to a GCP region. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. 
If page - streaming is performed per-page, this determines the maximum number - of resources in a page. + instance_id (str): Required. The logical name of the Redis instance in the customer project + with the following restrictions: + + - Must contain only lowercase letters, numbers, and hyphens. + - Must start with a letter. + - Must be between 1-40 characters. + - Must end with a number or a letter. + - Must be unique within the customer project / location + instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. A Redis [Instance] resource + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.redis_v1beta1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -521,10 +488,7 @@ def list_instances( that is provided to the method. Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.redis_v1beta1.types.Instance` instances. - You can also iterate over the pages of the response - using its `pages` property. + A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -534,18 +498,18 @@ def list_instances( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if "list_instances" not in self._inner_api_calls: + if "create_instance" not in self._inner_api_calls: self._inner_api_calls[ - "list_instances" + "create_instance" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_instances, - default_retry=self._method_configs["ListInstances"].retry, - default_timeout=self._method_configs["ListInstances"].timeout, + self.transport.create_instance, + default_retry=self._method_configs["CreateInstance"].retry, + default_timeout=self._method_configs["CreateInstance"].timeout, client_info=self._client_info, ) - request = cloud_redis_pb2.ListInstancesRequest( - parent=parent, page_size=page_size + request = cloud_redis_pb2.CreateInstanceRequest( + parent=parent, instance_id=instance_id, instance=instance ) if metadata is None: metadata = [] @@ -560,44 +524,72 @@ def list_instances( ) metadata.append(routing_metadata) - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_instances"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="instances", - request_token_field="page_token", - response_token_field="next_page_token", + operation = self._inner_api_calls["create_instance"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + cloud_redis_pb2.Instance, + metadata_type=any_pb2.Any, ) - return iterator - def get_instance( + def update_instance( self, - name, + update_mask, + instance, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Gets the details of a specific Redis instance. + Updates the metadata and configuration of a specific Redis instance. + + Completed longrunning.Operation will contain the new instance object + in the response field. 
The returned operation is automatically deleted + after a few hours, so there is no need to call DeleteOperation. Example: >>> from google.cloud import redis_v1beta1 >>> >>> client = redis_v1beta1.CloudRedisClient() >>> - >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + >>> paths_element = 'display_name' + >>> paths_element_2 = 'memory_size_gb' + >>> paths = [paths_element, paths_element_2] + >>> update_mask = {'paths': paths} + >>> display_name = 'UpdatedDisplayName' + >>> memory_size_gb = 4 + >>> instance = {'display_name': display_name, 'memory_size_gb': memory_size_gb} >>> - >>> response = client.get_instance(name) + >>> response = client.update_instance(update_mask, instance) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() Args: - name (str): Required. Redis instance resource name using the form: - ``projects/{project_id}/locations/{location_id}/instances/{instance_id}`` - where ``location_id`` refers to a GCP region. + update_mask (Union[dict, ~google.cloud.redis_v1beta1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied + in this field. The elements of the repeated paths field may only include + these fields from ``Instance``: + + - ``displayName`` + - ``labels`` + - ``memorySizeGb`` + - ``redisConfig`` + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.redis_v1beta1.types.FieldMask` + instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. Update description. Only fields specified in update\_mask are + updated. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.redis_v1beta1.types.Instance` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will be retried using a default configuration. @@ -608,7 +600,7 @@ def get_instance( that is provided to the method. Returns: - A :class:`~google.cloud.redis_v1beta1.types.Instance` instance. + A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -618,22 +610,24 @@ def get_instance( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if "get_instance" not in self._inner_api_calls: + if "update_instance" not in self._inner_api_calls: self._inner_api_calls[ - "get_instance" + "update_instance" ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_instance, - default_retry=self._method_configs["GetInstance"].retry, - default_timeout=self._method_configs["GetInstance"].timeout, + self.transport.update_instance, + default_retry=self._method_configs["UpdateInstance"].retry, + default_timeout=self._method_configs["UpdateInstance"].timeout, client_info=self._client_info, ) - request = cloud_redis_pb2.GetInstanceRequest(name=name) + request = cloud_redis_pb2.UpdateInstanceRequest( + update_mask=update_mask, instance=instance + ) if metadata is None: metadata = [] metadata = list(metadata) try: - routing_header = [("name", name)] + routing_header = [("instance.name", instance.name)] except AttributeError: pass else: @@ -642,9 +636,15 @@ def get_instance( ) metadata.append(routing_metadata) - return self._inner_api_calls["get_instance"]( + operation = self._inner_api_calls["update_instance"]( request, retry=retry, timeout=timeout, metadata=metadata ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + cloud_redis_pb2.Instance, + metadata_type=any_pb2.Any, + ) def import_instance( self, diff --git a/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py 
index bc2a7315175d..262a77dc9af0 100644 --- a/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py +++ b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py @@ -17,24 +17,24 @@ } }, "methods": { - "CreateInstance": { + "ListInstances": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, - "UpdateInstance": { + "GetInstance": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, - "ListInstances": { + "CreateInstance": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, - "GetInstance": { + "UpdateInstance": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ImportInstance": { diff --git a/redis/google/cloud/redis_v1beta1/gapic/transports/cloud_redis_grpc_transport.py b/redis/google/cloud/redis_v1beta1/gapic/transports/cloud_redis_grpc_transport.py index 28161ca905ea..72b2cd5cfef9 100644 --- a/redis/google/cloud/redis_v1beta1/gapic/transports/cloud_redis_grpc_transport.py +++ b/redis/google/cloud/redis_v1beta1/gapic/transports/cloud_redis_grpc_transport.py @@ -114,80 +114,80 @@ def channel(self): return self._channel @property - def create_instance(self): - """Return the gRPC stub for :meth:`CloudRedisClient.create_instance`. + def list_instances(self): + """Return the gRPC stub for :meth:`CloudRedisClient.list_instances`. - Creates a Redis instance based on the specified tier and memory size. + Lists all Redis instances owned by a project in either the specified + location (region) or all locations. - By default, the instance is accessible from the project's `default - network `__. + The location should have the following format: - The creation is executed asynchronously and callers may check the - returned operation to track its progress. 
Once the operation is - completed the Redis instance will be fully functional. Completed - longrunning.Operation will contain the new instance object in the - response field. + - ``projects/{project_id}/locations/{location_id}`` - The returned operation is automatically deleted after a few hours, so - there is no need to call DeleteOperation. + If ``location_id`` is specified as ``-`` (wildcard), then all regions + available to the project are queried, and the results are aggregated. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. """ - return self._stubs["cloud_redis_stub"].CreateInstance + return self._stubs["cloud_redis_stub"].ListInstances @property - def update_instance(self): - """Return the gRPC stub for :meth:`CloudRedisClient.update_instance`. - - Updates the metadata and configuration of a specific Redis instance. + def get_instance(self): + """Return the gRPC stub for :meth:`CloudRedisClient.get_instance`. - Completed longrunning.Operation will contain the new instance object - in the response field. The returned operation is automatically deleted - after a few hours, so there is no need to call DeleteOperation. + Gets the details of a specific Redis instance. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. """ - return self._stubs["cloud_redis_stub"].UpdateInstance + return self._stubs["cloud_redis_stub"].GetInstance @property - def list_instances(self): - """Return the gRPC stub for :meth:`CloudRedisClient.list_instances`. + def create_instance(self): + """Return the gRPC stub for :meth:`CloudRedisClient.create_instance`. - Lists all Redis instances owned by a project in either the specified - location (region) or all locations. + Creates a Redis instance based on the specified tier and memory size. 
- The location should have the following format: + By default, the instance is accessible from the project's `default + network `__. - - ``projects/{project_id}/locations/{location_id}`` + The creation is executed asynchronously and callers may check the + returned operation to track its progress. Once the operation is + completed the Redis instance will be fully functional. Completed + longrunning.Operation will contain the new instance object in the + response field. - If ``location_id`` is specified as ``-`` (wildcard), then all regions - available to the project are queried, and the results are aggregated. + The returned operation is automatically deleted after a few hours, so + there is no need to call DeleteOperation. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. """ - return self._stubs["cloud_redis_stub"].ListInstances + return self._stubs["cloud_redis_stub"].CreateInstance @property - def get_instance(self): - """Return the gRPC stub for :meth:`CloudRedisClient.get_instance`. + def update_instance(self): + """Return the gRPC stub for :meth:`CloudRedisClient.update_instance`. - Gets the details of a specific Redis instance. + Updates the metadata and configuration of a specific Redis instance. + + Completed longrunning.Operation will contain the new instance object + in the response field. The returned operation is automatically deleted + after a few hours, so there is no need to call DeleteOperation. Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a deserialized response object. 
""" - return self._stubs["cloud_redis_stub"].GetInstance + return self._stubs["cloud_redis_stub"].UpdateInstance @property def import_instance(self): diff --git a/redis/synth.metadata b/redis/synth.metadata index 10438c165c33..1a7e4f97ab61 100644 --- a/redis/synth.metadata +++ b/redis/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-22T12:26:55.965097Z", + "updateTime": "2019-10-08T12:31:15.017267Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.0", - "dockerImage": "googleapis/artman@sha256:0f66008f69061ea6d41499e2a34da3fc64fc7c9798077e3a37158653a135d801" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "999d0930cea7a7cb3147a7c5432e1f011060d549", - "internalRef": "270363949" + "sha": "122bdbf877ad87439f8dd9d1474a8e5dde188087", + "internalRef": "273381131" } }, { diff --git a/redis/tests/unit/gapic/v1/test_cloud_redis_client_v1.py b/redis/tests/unit/gapic/v1/test_cloud_redis_client_v1.py index 5fa4196a510c..b2ddde330d54 100644 --- a/redis/tests/unit/gapic/v1/test_cloud_redis_client_v1.py +++ b/redis/tests/unit/gapic/v1/test_cloud_redis_client_v1.py @@ -66,6 +66,113 @@ class CustomException(Exception): class TestCloudRedisClient(object): + def test_list_instances(self): + # Setup Expected Response + next_page_token = "" + instances_element = {} + instances = [instances_element] + expected_response = {"next_page_token": next_page_token, "instances": instances} + expected_response = cloud_redis_pb2.ListInstancesResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = redis_v1.CloudRedisClient() + + # Setup Request + parent = 
client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_instances(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.instances[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.ListInstancesRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_instances_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = redis_v1.CloudRedisClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_instances(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_instance(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + location_id = "locationId552319461" + alternative_location_id = "alternativeLocationId-718920621" + redis_version = "redisVersion-685310444" + reserved_ip_range = "reservedIpRange-1082940580" + host = "host3208616" + port = 3446913 + current_location_id = "currentLocationId1312712735" + status_message = "statusMessage-239442758" + memory_size_gb = 34199707 + authorized_network = "authorizedNetwork-1733809270" + persistence_iam_identity = "persistenceIamIdentity1061944584" + expected_response = { + "name": name_2, + "display_name": display_name, + "location_id": location_id, + "alternative_location_id": alternative_location_id, + "redis_version": redis_version, + "reserved_ip_range": reserved_ip_range, + "host": host, + "port": port, + "current_location_id": current_location_id, + "status_message": status_message, + "memory_size_gb": memory_size_gb, + "authorized_network": authorized_network, + "persistence_iam_identity": 
persistence_iam_identity, + } + expected_response = cloud_redis_pb2.Instance(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = redis_v1.CloudRedisClient() + + # Setup Request + name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + + response = client.get_instance(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.GetInstanceRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_instance_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = redis_v1.CloudRedisClient() + + # Setup request + name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + + with pytest.raises(CustomException): + client.get_instance(name) + def test_create_instance(self): # Setup Expected Response name = "name3373707" @@ -242,113 +349,6 @@ def test_update_instance_exception(self): exception = response.exception() assert exception.errors[0] == error - def test_list_instances(self): - # Setup Expected Response - next_page_token = "" - instances_element = {} - instances = [instances_element] - expected_response = {"next_page_token": next_page_token, "instances": instances} - expected_response = cloud_redis_pb2.ListInstancesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = redis_v1.CloudRedisClient() - - # Setup Request - parent = 
client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_instances(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.instances[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = cloud_redis_pb2.ListInstancesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_instances_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = redis_v1.CloudRedisClient() - - # Setup request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_instances(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_instance(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - location_id = "locationId552319461" - alternative_location_id = "alternativeLocationId-718920621" - redis_version = "redisVersion-685310444" - reserved_ip_range = "reservedIpRange-1082940580" - host = "host3208616" - port = 3446913 - current_location_id = "currentLocationId1312712735" - status_message = "statusMessage-239442758" - memory_size_gb = 34199707 - authorized_network = "authorizedNetwork-1733809270" - persistence_iam_identity = "persistenceIamIdentity1061944584" - expected_response = { - "name": name_2, - "display_name": display_name, - "location_id": location_id, - "alternative_location_id": alternative_location_id, - "redis_version": redis_version, - "reserved_ip_range": reserved_ip_range, - "host": host, - "port": port, - "current_location_id": current_location_id, - "status_message": status_message, - "memory_size_gb": memory_size_gb, - "authorized_network": authorized_network, - "persistence_iam_identity": 
persistence_iam_identity, - } - expected_response = cloud_redis_pb2.Instance(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = redis_v1.CloudRedisClient() - - # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") - - response = client.get_instance(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloud_redis_pb2.GetInstanceRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_instance_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = redis_v1.CloudRedisClient() - - # Setup request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") - - with pytest.raises(CustomException): - client.get_instance(name) - def test_import_instance(self): # Setup Expected Response name_2 = "name2-1052831874" diff --git a/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py b/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py index 62cc868a2d09..fb7646612152 100644 --- a/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py +++ b/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py @@ -66,6 +66,113 @@ class CustomException(Exception): class TestCloudRedisClient(object): + def test_list_instances(self): + # Setup Expected Response + next_page_token = "" + instances_element = {} + instances = [instances_element] + expected_response = {"next_page_token": next_page_token, "instances": instances} + expected_response = 
cloud_redis_pb2.ListInstancesResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = redis_v1beta1.CloudRedisClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_instances(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.instances[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.ListInstancesRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_instances_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = redis_v1beta1.CloudRedisClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_instances(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_instance(self): + # Setup Expected Response + name_2 = "name2-1052831874" + display_name = "displayName1615086568" + location_id = "locationId552319461" + alternative_location_id = "alternativeLocationId-718920621" + redis_version = "redisVersion-685310444" + reserved_ip_range = "reservedIpRange-1082940580" + host = "host3208616" + port = 3446913 + current_location_id = "currentLocationId1312712735" + status_message = "statusMessage-239442758" + memory_size_gb = 34199707 + authorized_network = "authorizedNetwork-1733809270" + persistence_iam_identity = "persistenceIamIdentity1061944584" + expected_response = { + "name": name_2, + "display_name": display_name, + "location_id": location_id, + 
"alternative_location_id": alternative_location_id, + "redis_version": redis_version, + "reserved_ip_range": reserved_ip_range, + "host": host, + "port": port, + "current_location_id": current_location_id, + "status_message": status_message, + "memory_size_gb": memory_size_gb, + "authorized_network": authorized_network, + "persistence_iam_identity": persistence_iam_identity, + } + expected_response = cloud_redis_pb2.Instance(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = redis_v1beta1.CloudRedisClient() + + # Setup Request + name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + + response = client.get_instance(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.GetInstanceRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_instance_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = redis_v1beta1.CloudRedisClient() + + # Setup request + name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") + + with pytest.raises(CustomException): + client.get_instance(name) + def test_create_instance(self): # Setup Expected Response name = "name3373707" @@ -244,113 +351,6 @@ def test_update_instance_exception(self): exception = response.exception() assert exception.errors[0] == error - def test_list_instances(self): - # Setup Expected Response - next_page_token = "" - instances_element = {} - instances = [instances_element] - expected_response = {"next_page_token": next_page_token, "instances": instances} - 
expected_response = cloud_redis_pb2.ListInstancesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = redis_v1beta1.CloudRedisClient() - - # Setup Request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_instances(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.instances[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = cloud_redis_pb2.ListInstancesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_instances_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = redis_v1beta1.CloudRedisClient() - - # Setup request - parent = client.location_path("[PROJECT]", "[LOCATION]") - - paged_list_response = client.list_instances(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_instance(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - location_id = "locationId552319461" - alternative_location_id = "alternativeLocationId-718920621" - redis_version = "redisVersion-685310444" - reserved_ip_range = "reservedIpRange-1082940580" - host = "host3208616" - port = 3446913 - current_location_id = "currentLocationId1312712735" - status_message = "statusMessage-239442758" - memory_size_gb = 34199707 - authorized_network = "authorizedNetwork-1733809270" - persistence_iam_identity = "persistenceIamIdentity1061944584" - expected_response = { - "name": name_2, - "display_name": display_name, - "location_id": 
location_id, - "alternative_location_id": alternative_location_id, - "redis_version": redis_version, - "reserved_ip_range": reserved_ip_range, - "host": host, - "port": port, - "current_location_id": current_location_id, - "status_message": status_message, - "memory_size_gb": memory_size_gb, - "authorized_network": authorized_network, - "persistence_iam_identity": persistence_iam_identity, - } - expected_response = cloud_redis_pb2.Instance(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = redis_v1beta1.CloudRedisClient() - - # Setup Request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") - - response = client.get_instance(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = cloud_redis_pb2.GetInstanceRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_instance_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = redis_v1beta1.CloudRedisClient() - - # Setup request - name = client.instance_path("[PROJECT]", "[LOCATION]", "[INSTANCE]") - - with pytest.raises(CustomException): - client.get_instance(name) - def test_import_instance(self): # Setup Expected Response name_2 = "name2-1052831874" diff --git a/resource_manager/CHANGELOG.md b/resource_manager/CHANGELOG.md index 3cb946992452..20619adfc37d 100644 --- a/resource_manager/CHANGELOG.md +++ b/resource_manager/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-resource-manager/#history +## 0.30.0 + +10-10-2019 11:38 PDT + + +### New Features +- Add 
`client_options` support. ([#9043](https://github.com/googleapis/google-cloud-python/pull/9043)) + +### Dependencies +- Pin minimum version of `google-cloud-core` to 1.0.3. ([#9043](https://github.com/googleapis/google-cloud-python/pull/9043)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 0.29.2 07-24-2019 17:25 PDT diff --git a/resource_manager/setup.py b/resource_manager/setup.py index f2f28c680f29..6cc22b82e864 100644 --- a/resource_manager/setup.py +++ b/resource_manager/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-resource-manager' description = 'Google Cloud Resource Manager API client library' -version = '0.29.2' +version = '0.30.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/runtimeconfig/CHANGELOG.md b/runtimeconfig/CHANGELOG.md index 3c930b633c60..f0438697f676 100644 --- a/runtimeconfig/CHANGELOG.md +++ b/runtimeconfig/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-runtimeconfig/#history +## 0.30.0 + +10-15-2019 06:53 PDT + + +### New Features +- Add `client_options` to client. ([#9045](https://github.com/googleapis/google-cloud-python/pull/9045)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix broken links in docs. 
([#9148](https://github.com/googleapis/google-cloud-python/pull/9148)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 0.29.2 07-24-2019 17:26 PDT diff --git a/runtimeconfig/setup.py b/runtimeconfig/setup.py index 34b92d567f2e..e519f14b6666 100644 --- a/runtimeconfig/setup.py +++ b/runtimeconfig/setup.py @@ -22,14 +22,14 @@ name = 'google-cloud-runtimeconfig' description = 'Google Cloud RuntimeConfig API client library' -version = '0.29.2' +version = '0.30.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 3 - Alpha' dependencies = [ - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } diff --git a/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client.py b/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client.py index 68a4f3909189..bbedb85b3332 100644 --- a/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client.py +++ b/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client.py @@ -98,13 +98,6 @@ def location_path(cls, project, location): location=location, ) - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - def __init__( self, transport=None, @@ -251,9 +244,7 @@ def list_jobs( ... pass Args: - parent (str): Required. - - The location name. For example: + parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. page_size (int): The maximum number of resources contained in the underlying API response. 
If page streaming is performed per- @@ -342,9 +333,7 @@ def get_job( >>> response = client.get_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -418,16 +407,12 @@ def create_job( >>> response = client.create_job(parent, job) Args: - parent (str): Required. - - The location name. For example: + parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. - job (Union[dict, ~google.cloud.scheduler_v1.types.Job]): Required. - - The job to add. The user can optionally specify a name for the job in - ``name``. ``name`` cannot be the same as an existing job. If a name is - not specified then the system will generate a random unique name that - will be returned (``name``) in the response. + job (Union[dict, ~google.cloud.scheduler_v1.types.Job]): Required. The job to add. The user can optionally specify a name for the + job in ``name``. ``name`` cannot be the same as an existing job. If a + name is not specified then the system will generate a random unique name + that will be returned (``name``) in the response. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.scheduler_v1.types.Job` @@ -512,9 +497,7 @@ def update_job( >>> response = client.update_job(job, update_mask) Args: - job (Union[dict, ~google.cloud.scheduler_v1.types.Job]): Required. - - The new job properties. ``name`` must be specified. + job (Union[dict, ~google.cloud.scheduler_v1.types.Job]): Required. The new job properties. ``name`` must be specified. Output only fields cannot be modified using UpdateJob. Any value specified for an output only field will be ignored. @@ -593,9 +576,7 @@ def delete_job( >>> client.delete_job(name) Args: - name (str): Required. 
- - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -667,9 +648,7 @@ def pause_job( >>> response = client.pause_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -744,9 +723,7 @@ def resume_job( >>> response = client.resume_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -819,9 +796,7 @@ def run_job( >>> response = client.run_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will diff --git a/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client_config.py b/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client_config.py index 31ccca1db159..da963b9d59b0 100644 --- a/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client_config.py +++ b/scheduler/google/cloud/scheduler_v1/gapic/cloud_scheduler_client_config.py @@ -18,42 +18,42 @@ }, "methods": { "ListJobs": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "GetJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "CreateJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "DeleteJob": { - "timeout_millis": 30000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "PauseJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ResumeJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "RunJob": { - "timeout_millis": 30000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, diff --git a/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler.proto b/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler.proto index a68446235c38..89ce8cbd338c 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler.proto +++ b/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.scheduler.v1; import 
"google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/scheduler/v1/job.proto"; import "google/protobuf/empty.proto"; @@ -32,11 +34,15 @@ option objc_class_prefix = "SCHEDULER"; // The Cloud Scheduler API allows external entities to reliably // schedule asynchronous jobs. service CloudScheduler { + option (google.api.default_host) = "cloudscheduler.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Lists jobs. rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { option (google.api.http) = { get: "/v1/{parent=projects/*/locations/*}/jobs" }; + option (google.api.method_signature) = "parent"; } // Gets a job. @@ -44,6 +50,7 @@ service CloudScheduler { option (google.api.http) = { get: "/v1/{name=projects/*/locations/*/jobs/*}" }; + option (google.api.method_signature) = "name"; } // Creates a job. @@ -52,6 +59,7 @@ service CloudScheduler { post: "/v1/{parent=projects/*/locations/*}/jobs" body: "job" }; + option (google.api.method_signature) = "parent,job"; } // Updates a job. @@ -68,6 +76,7 @@ service CloudScheduler { patch: "/v1/{job.name=projects/*/locations/*/jobs/*}" body: "job" }; + option (google.api.method_signature) = "job,update_mask"; } // Deletes a job. @@ -75,6 +84,7 @@ service CloudScheduler { option (google.api.http) = { delete: "/v1/{name=projects/*/locations/*/jobs/*}" }; + option (google.api.method_signature) = "name"; } // Pauses a job. @@ -89,6 +99,7 @@ service CloudScheduler { post: "/v1/{name=projects/*/locations/*/jobs/*}:pause" body: "*" }; + option (google.api.method_signature) = "name"; } // Resume a job. @@ -102,6 +113,7 @@ service CloudScheduler { post: "/v1/{name=projects/*/locations/*/jobs/*}:resume" body: "*" }; + option (google.api.method_signature) = "name"; } // Forces a job to run now. 
@@ -113,16 +125,20 @@ service CloudScheduler { post: "/v1/{name=projects/*/locations/*/jobs/*}:run" body: "*" }; + option (google.api.method_signature) = "name"; } } // Request message for listing jobs using [ListJobs][google.cloud.scheduler.v1.CloudScheduler.ListJobs]. message ListJobsRequest { - // Required. - // - // The location name. For example: + // Required. The location name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudscheduler.googleapis.com/Job" + } + ]; // Requested page size. // @@ -159,79 +175,94 @@ message ListJobsResponse { // Request message for [GetJob][google.cloud.scheduler.v1.CloudScheduler.GetJob]. message GetJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [CreateJob][google.cloud.scheduler.v1.CloudScheduler.CreateJob]. message CreateJobRequest { - // Required. - // - // The location name. For example: + // Required. The location name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudscheduler.googleapis.com/Job" + } + ]; - // Required. - // - // The job to add. The user can optionally specify a name for the + // Required. The job to add. The user can optionally specify a name for the // job in [name][google.cloud.scheduler.v1.Job.name]. [name][google.cloud.scheduler.v1.Job.name] cannot be the same as an // existing job. 
If a name is not specified then the system will // generate a random unique name that will be returned // ([name][google.cloud.scheduler.v1.Job.name]) in the response. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for [UpdateJob][google.cloud.scheduler.v1.CloudScheduler.UpdateJob]. message UpdateJobRequest { - // Required. - // - // The new job properties. [name][google.cloud.scheduler.v1.Job.name] must be specified. + // Required. The new job properties. [name][google.cloud.scheduler.v1.Job.name] must be specified. // // Output only fields cannot be modified using UpdateJob. // Any value specified for an output only field will be ignored. - Job job = 1; + Job job = 1 [(google.api.field_behavior) = REQUIRED]; // A mask used to specify which fields of the job are being updated. - google.protobuf.FieldMask update_mask = 2; + google.protobuf.FieldMask update_mask = 2 + [(google.api.field_behavior) = REQUIRED]; } // Request message for deleting a job using // [DeleteJob][google.cloud.scheduler.v1.CloudScheduler.DeleteJob]. message DeleteJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [PauseJob][google.cloud.scheduler.v1.CloudScheduler.PauseJob]. message PauseJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [ResumeJob][google.cloud.scheduler.v1.CloudScheduler.ResumeJob]. 
message ResumeJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for forcing a job to run now using // [RunJob][google.cloud.scheduler.v1.CloudScheduler.RunJob]. message RunJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } diff --git a/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler_pb2.py b/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler_pb2.py index 109cb95585aa..4b7e2078b9bd 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler_pb2.py +++ b/scheduler/google/cloud/scheduler_v1/proto/cloudscheduler_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.scheduler_v1.proto import ( job_pb2 as google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2, @@ -32,10 +34,12 @@ "\n\035com.google.cloud.scheduler.v1B\016SchedulerProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\242\002\tSCHEDULER" ), serialized_pb=_b( - 
'\n4google/cloud/scheduler_v1/proto/cloudscheduler.proto\x12\x19google.cloud.scheduler.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a)google/cloud/scheduler_v1/proto/job.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"H\n\x0fListJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"Y\n\x10ListJobsResponse\x12,\n\x04jobs\x18\x01 \x03(\x0b\x32\x1e.google.cloud.scheduler.v1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x10\x43reateJobRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12+\n\x03job\x18\x02 \x01(\x0b\x32\x1e.google.cloud.scheduler.v1.Job"p\n\x10UpdateJobRequest\x12+\n\x03job\x18\x01 \x01(\x0b\x32\x1e.google.cloud.scheduler.v1.Job\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask" \n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1f\n\x0fPauseJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t" \n\x10ResumeJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1d\n\rRunJobRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\x95\t\n\x0e\x43loudScheduler\x12\x95\x01\n\x08ListJobs\x12*.google.cloud.scheduler.v1.ListJobsRequest\x1a+.google.cloud.scheduler.v1.ListJobsResponse"0\x82\xd3\xe4\x93\x02*\x12(/v1/{parent=projects/*/locations/*}/jobs\x12\x84\x01\n\x06GetJob\x12(.google.cloud.scheduler.v1.GetJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"0\x82\xd3\xe4\x93\x02*\x12(/v1/{name=projects/*/locations/*/jobs/*}\x12\x8f\x01\n\tCreateJob\x12+.google.cloud.scheduler.v1.CreateJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"5\x82\xd3\xe4\x93\x02/"(/v1/{parent=projects/*/locations/*}/jobs:\x03job\x12\x93\x01\n\tUpdateJob\x12+.google.cloud.scheduler.v1.UpdateJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"9\x82\xd3\xe4\x93\x02\x33\x32,/v1/{job.name=projects/*/locations/*/jobs/*}:\x03job\x12\x82\x01\n\tDeleteJob\x12+.google.cloud.scheduler.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"0\x82\xd3\xe4\x93\x02**(/v1/{name=projects/*/locations/*/jobs/*}\x12\x91\x01\n\x08PauseJob\x12*.google.cloud.scheduler.v1.PauseJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"9\x82\xd3\xe4\x93\x02\x33"./v1/{name=projects/*/locations/*/jobs/*}:pause:\x01*\x12\x94\x01\n\tResumeJob\x12+.google.cloud.scheduler.v1.ResumeJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job":\x82\xd3\xe4\x93\x02\x34"//v1/{name=projects/*/locations/*/jobs/*}:resume:\x01*\x12\x8b\x01\n\x06RunJob\x12(.google.cloud.scheduler.v1.RunJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"7\x82\xd3\xe4\x93\x02\x31",/v1/{name=projects/*/locations/*/jobs/*}:run:\x01*B\x81\x01\n\x1d\x63om.google.cloud.scheduler.v1B\x0eSchedulerProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\xa2\x02\tSCHEDULERb\x06proto3' + 
'\n4google/cloud/scheduler_v1/proto/cloudscheduler.proto\x12\x19google.cloud.scheduler.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a)google/cloud/scheduler_v1/proto/job.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"s\n\x0fListJobsRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!cloudscheduler.googleapis.com/Job\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"Y\n\x10ListJobsResponse\x12,\n\x04jobs\x18\x01 \x03(\x0b\x32\x1e.google.cloud.scheduler.v1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\rGetJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"\x7f\n\x10\x43reateJobRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!cloudscheduler.googleapis.com/Job\x12\x30\n\x03job\x18\x02 \x01(\x0b\x32\x1e.google.cloud.scheduler.v1.JobB\x03\xe0\x41\x02"z\n\x10UpdateJobRequest\x12\x30\n\x03job\x18\x01 \x01(\x0b\x32\x1e.google.cloud.scheduler.v1.JobB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"K\n\x10\x44\x65leteJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"J\n\x0fPauseJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"K\n\x10ResumeJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"H\n\rRunJobRequest\x12\x37\n\x04name\x18\x01 
\x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job2\xb3\n\n\x0e\x43loudScheduler\x12\x9e\x01\n\x08ListJobs\x12*.google.cloud.scheduler.v1.ListJobsRequest\x1a+.google.cloud.scheduler.v1.ListJobsResponse"9\x82\xd3\xe4\x93\x02*\x12(/v1/{parent=projects/*/locations/*}/jobs\xda\x41\x06parent\x12\x8b\x01\n\x06GetJob\x12(.google.cloud.scheduler.v1.GetJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"7\x82\xd3\xe4\x93\x02*\x12(/v1/{name=projects/*/locations/*/jobs/*}\xda\x41\x04name\x12\x9c\x01\n\tCreateJob\x12+.google.cloud.scheduler.v1.CreateJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"B\x82\xd3\xe4\x93\x02/"(/v1/{parent=projects/*/locations/*}/jobs:\x03job\xda\x41\nparent,job\x12\xa5\x01\n\tUpdateJob\x12+.google.cloud.scheduler.v1.UpdateJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"K\x82\xd3\xe4\x93\x02\x33\x32,/v1/{job.name=projects/*/locations/*/jobs/*}:\x03job\xda\x41\x0fjob,update_mask\x12\x89\x01\n\tDeleteJob\x12+.google.cloud.scheduler.v1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"7\x82\xd3\xe4\x93\x02**(/v1/{name=projects/*/locations/*/jobs/*}\xda\x41\x04name\x12\x98\x01\n\x08PauseJob\x12*.google.cloud.scheduler.v1.PauseJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"@\x82\xd3\xe4\x93\x02\x33"./v1/{name=projects/*/locations/*/jobs/*}:pause:\x01*\xda\x41\x04name\x12\x9b\x01\n\tResumeJob\x12+.google.cloud.scheduler.v1.ResumeJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job"A\x82\xd3\xe4\x93\x02\x34"//v1/{name=projects/*/locations/*/jobs/*}:resume:\x01*\xda\x41\x04name\x12\x92\x01\n\x06RunJob\x12(.google.cloud.scheduler.v1.RunJobRequest\x1a\x1e.google.cloud.scheduler.v1.Job">\x82\xd3\xe4\x93\x02\x31",/v1/{name=projects/*/locations/*/jobs/*}:run:\x01*\xda\x41\x04name\x1aQ\xca\x41\x1d\x63loudscheduler.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x81\x01\n\x1d\x63om.google.cloud.scheduler.v1B\x0eSchedulerProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\xa2\x02\tSCHEDULERb\x06proto3' ), 
dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, @@ -66,7 +70,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\022!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -114,8 +120,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=246, - serialized_end=318, + serialized_start=304, + serialized_end=419, ) @@ -171,8 +177,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=320, - serialized_end=409, + serialized_start=421, + serialized_end=510, ) @@ -198,7 +204,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -210,8 +218,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=411, - serialized_end=440, + serialized_start=512, + serialized_end=584, ) @@ -237,7 +245,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\022!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -255,7 +265,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -267,8 +277,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=442, - serialized_end=521, + serialized_start=586, + serialized_end=713, ) @@ -294,7 +304,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -312,7 +322,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -324,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=523, - serialized_end=635, + serialized_start=715, + serialized_end=837, ) @@ -351,7 +361,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -363,8 +375,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=637, - serialized_end=669, + serialized_start=839, + serialized_end=914, ) @@ -390,7 +402,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -402,8 +416,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=671, - serialized_end=702, + serialized_start=916, + serialized_end=990, ) @@ -429,7 +443,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -441,8 +457,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=704, - serialized_end=736, + serialized_start=992, + serialized_end=1067, ) @@ -468,7 +484,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -480,8 +498,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=738, - serialized_end=767, + serialized_start=1069, + serialized_end=1141, ) _LISTJOBSRESPONSE.fields_by_name[ @@ -519,7 
+537,7 @@ Attributes: parent: - Required. The location name. For example: + Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. page_size: Requested page size. The maximum page size is 500. If @@ -583,7 +601,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.GetJobRequest) @@ -603,10 +621,10 @@ Attributes: parent: - Required. The location name. For example: + Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. job: - Required. The job to add. The user can optionally specify a + Required. The job to add. The user can optionally specify a name for the job in [name][google.cloud.scheduler.v1.Job.name]. [name][google.cloud.scheduler.v1.Job.name] cannot be the same @@ -631,7 +649,7 @@ Attributes: job: - Required. The new job properties. + Required. The new job properties. [name][google.cloud.scheduler.v1.Job.name] must be specified. Output only fields cannot be modified using UpdateJob. Any value specified for an output only field will be ignored. @@ -656,7 +674,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.DeleteJobRequest) @@ -676,7 +694,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.PauseJobRequest) @@ -696,7 +714,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. 
""", # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.ResumeJobRequest) @@ -716,7 +734,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.RunJobRequest) @@ -726,15 +744,27 @@ DESCRIPTOR._options = None +_LISTJOBSREQUEST.fields_by_name["parent"]._options = None +_GETJOBREQUEST.fields_by_name["name"]._options = None +_CREATEJOBREQUEST.fields_by_name["parent"]._options = None +_CREATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["update_mask"]._options = None +_DELETEJOBREQUEST.fields_by_name["name"]._options = None +_PAUSEJOBREQUEST.fields_by_name["name"]._options = None +_RESUMEJOBREQUEST.fields_by_name["name"]._options = None +_RUNJOBREQUEST.fields_by_name["name"]._options = None _CLOUDSCHEDULER = _descriptor.ServiceDescriptor( name="CloudScheduler", full_name="google.cloud.scheduler.v1.CloudScheduler", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=770, - serialized_end=1943, + serialized_options=_b( + "\312A\035cloudscheduler.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1144, + serialized_end=2475, methods=[ _descriptor.MethodDescriptor( name="ListJobs", @@ -744,7 +774,7 @@ input_type=_LISTJOBSREQUEST, output_type=_LISTJOBSRESPONSE, serialized_options=_b( - "\202\323\344\223\002*\022(/v1/{parent=projects/*/locations/*}/jobs" + "\202\323\344\223\002*\022(/v1/{parent=projects/*/locations/*}/jobs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -755,7 +785,7 @@ input_type=_GETJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - "\202\323\344\223\002*\022(/v1/{name=projects/*/locations/*/jobs/*}" + 
"\202\323\344\223\002*\022(/v1/{name=projects/*/locations/*/jobs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -766,7 +796,7 @@ input_type=_CREATEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\002/"(/v1/{parent=projects/*/locations/*}/jobs:\003job' + '\202\323\344\223\002/"(/v1/{parent=projects/*/locations/*}/jobs:\003job\332A\nparent,job' ), ), _descriptor.MethodDescriptor( @@ -777,7 +807,7 @@ input_type=_UPDATEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - "\202\323\344\223\00232,/v1/{job.name=projects/*/locations/*/jobs/*}:\003job" + "\202\323\344\223\00232,/v1/{job.name=projects/*/locations/*/jobs/*}:\003job\332A\017job,update_mask" ), ), _descriptor.MethodDescriptor( @@ -788,7 +818,7 @@ input_type=_DELETEJOBREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002**(/v1/{name=projects/*/locations/*/jobs/*}" + "\202\323\344\223\002**(/v1/{name=projects/*/locations/*/jobs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -799,7 +829,7 @@ input_type=_PAUSEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0023"./v1/{name=projects/*/locations/*/jobs/*}:pause:\001*' + '\202\323\344\223\0023"./v1/{name=projects/*/locations/*/jobs/*}:pause:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -810,7 +840,7 @@ input_type=_RESUMEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0024"//v1/{name=projects/*/locations/*/jobs/*}:resume:\001*' + '\202\323\344\223\0024"//v1/{name=projects/*/locations/*/jobs/*}:resume:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -821,7 +851,7 @@ input_type=_RUNJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1_dot_proto_dot_job__pb2._JOB, 
serialized_options=_b( - '\202\323\344\223\0021",/v1/{name=projects/*/locations/*/jobs/*}:run:\001*' + '\202\323\344\223\0021",/v1/{name=projects/*/locations/*/jobs/*}:run:\001*\332A\004name' ), ), ], diff --git a/scheduler/google/cloud/scheduler_v1/proto/job.proto b/scheduler/google/cloud/scheduler_v1/proto/job.proto index 60b47263151b..d26070266b18 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/job.proto +++ b/scheduler/google/cloud/scheduler_v1/proto/job.proto @@ -17,12 +17,12 @@ syntax = "proto3"; package google.cloud.scheduler.v1; -import "google/api/annotations.proto"; import "google/api/resource.proto"; import "google/cloud/scheduler/v1/target.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler"; option java_multiple_files = true; @@ -32,6 +32,11 @@ option java_package = "com.google.cloud.scheduler.v1"; // Configuration for a job. // The maximum allowed size for a job is 100KB. message Job { + option (google.api.resource) = { + type: "cloudscheduler.googleapis.com/Job" + pattern: "projects/{project}/locations/{location}/jobs/{job}" + }; + // State of the job. enum State { // Unspecified state. 
diff --git a/scheduler/google/cloud/scheduler_v1/proto/job_pb2.py b/scheduler/google/cloud/scheduler_v1/proto/job_pb2.py index 7f5d3a791458..cfc36eecfcca 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/job_pb2.py +++ b/scheduler/google/cloud/scheduler_v1/proto/job_pb2.py @@ -15,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.scheduler_v1.proto import ( target_pb2 as google_dot_cloud_dot_scheduler__v1_dot_proto_dot_target__pb2, @@ -23,6 +22,7 @@ from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -33,15 +33,15 @@ "\n\035com.google.cloud.scheduler.v1B\010JobProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler" ), serialized_pb=_b( - '\n)google/cloud/scheduler_v1/proto/job.proto\x12\x19google.cloud.scheduler.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/scheduler_v1/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xef\x05\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12@\n\rpubsub_target\x18\x04 \x01(\x0b\x32\'.google.cloud.scheduler.v1.PubsubTargetH\x00\x12P\n\x16\x61pp_engine_http_target\x18\x05 \x01(\x0b\x32..google.cloud.scheduler.v1.AppEngineHttpTargetH\x00\x12<\n\x0bhttp_target\x18\x06 \x01(\x0b\x32%.google.cloud.scheduler.v1.HttpTargetH\x00\x12\x10\n\x08schedule\x18\x14 \x01(\t\x12\x11\n\ttime_zone\x18\x15 \x01(\t\x12\x34\n\x10user_update_time\x18\t 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x05state\x18\n \x01(\x0e\x32$.google.cloud.scheduler.v1.Job.State\x12"\n\x06status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\rschedule_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11last_attempt_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x0cretry_config\x18\x13 \x01(\x0b\x32&.google.cloud.scheduler.v1.RetryConfig\x12\x33\n\x10\x61ttempt_deadline\x18\x16 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x12\x11\n\rUPDATE_FAILED\x10\x04\x42\x08\n\x06target"\xe2\x01\n\x0bRetryConfig\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14min_backoff_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14max_backoff_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05\x42o\n\x1d\x63om.google.cloud.scheduler.v1B\x08JobProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;schedulerb\x06proto3' + '\n)google/cloud/scheduler_v1/proto/job.proto\x12\x19google.cloud.scheduler.v1\x1a\x19google/api/resource.proto\x1a,google/cloud/scheduler_v1/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xcb\x06\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12@\n\rpubsub_target\x18\x04 \x01(\x0b\x32\'.google.cloud.scheduler.v1.PubsubTargetH\x00\x12P\n\x16\x61pp_engine_http_target\x18\x05 \x01(\x0b\x32..google.cloud.scheduler.v1.AppEngineHttpTargetH\x00\x12<\n\x0bhttp_target\x18\x06 \x01(\x0b\x32%.google.cloud.scheduler.v1.HttpTargetH\x00\x12\x10\n\x08schedule\x18\x14 \x01(\t\x12\x11\n\ttime_zone\x18\x15 
\x01(\t\x12\x34\n\x10user_update_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x05state\x18\n \x01(\x0e\x32$.google.cloud.scheduler.v1.Job.State\x12"\n\x06status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\rschedule_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11last_attempt_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\x0cretry_config\x18\x13 \x01(\x0b\x32&.google.cloud.scheduler.v1.RetryConfig\x12\x33\n\x10\x61ttempt_deadline\x18\x16 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x12\x11\n\rUPDATE_FAILED\x10\x04:Z\xea\x41W\n!cloudscheduler.googleapis.com/Job\x12\x32projects/{project}/locations/{location}/jobs/{job}B\x08\n\x06target"\xe2\x01\n\x0bRetryConfig\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14min_backoff_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14max_backoff_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05\x42o\n\x1d\x63om.google.cloud.scheduler.v1B\x08JobProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;schedulerb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_scheduler__v1_dot_proto_dot_target__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -343,7 +343,9 @@ extensions=[], nested_types=[], enum_types=[_JOB_STATE], - serialized_options=None, + serialized_options=_b( + "\352AW\n!cloudscheduler.googleapis.com/Job\0222projects/{project}/locations/{location}/jobs/{job}" + ), is_extendable=False, 
syntax="proto3", extension_ranges=[], @@ -357,7 +359,7 @@ ) ], serialized_start=266, - serialized_end=1017, + serialized_end=1109, ) @@ -467,8 +469,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1020, - serialized_end=1246, + serialized_start=1112, + serialized_end=1338, ) _JOB.fields_by_name[ @@ -700,4 +702,5 @@ DESCRIPTOR._options = None +_JOB._options = None # @@protoc_insertion_point(module_scope) diff --git a/scheduler/google/cloud/scheduler_v1/proto/target.proto b/scheduler/google/cloud/scheduler_v1/proto/target.proto index e33b1558e53d..9a8f32f7c60e 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/target.proto +++ b/scheduler/google/cloud/scheduler_v1/proto/target.proto @@ -17,8 +17,8 @@ syntax = "proto3"; package google.cloud.scheduler.v1; +import "google/api/resource.proto"; import "google/api/annotations.proto"; -import "google/protobuf/any.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler"; option java_multiple_files = true; @@ -32,9 +32,7 @@ option java_package = "com.google.cloud.scheduler.v1"; // constitutes a failed execution. For a redirected request, the response // returned by the redirected request is considered. message HttpTarget { - // Required. - // - // The full URI path that the request will be sent to. This string + // Required. The full URI path that the request will be sent to. This string // must begin with either "http://" or "https://". Some examples of // valid values for [uri][google.cloud.scheduler.v1.HttpTarget.uri] are: // `http://acme.com` and `https://acme.com/sales:8080`. Cloud Scheduler will @@ -77,8 +75,8 @@ message HttpTarget { // will be generated and attached as an `Authorization` header in the HTTP // request. // - // This type of authorization should be used when sending requests to a GCP - // endpoint. + // This type of authorization should generally only be used when calling + // Google APIs hosted on *.googleapis.com. 
OAuthToken oauth_token = 5; // If specified, an @@ -86,8 +84,9 @@ message HttpTarget { // token will be generated and attached as an `Authorization` header in the // HTTP request. // - // This type of authorization should be used when sending requests to third - // party endpoints or Cloud Run. + // This type of authorization can be used for many scenarios, including + // calling Cloud Run, or endpoints where you intend to validate the token + // yourself. OidcToken oidc_token = 6; } } @@ -162,16 +161,16 @@ message AppEngineHttpTarget { // Pub/Sub target. The job will be delivered by publishing a message to // the given Pub/Sub topic. message PubsubTarget { - // Required. - // - // The name of the Cloud Pub/Sub topic to which messages will + // Required. The name of the Cloud Pub/Sub topic to which messages will // be published when a job is delivered. The topic name must be in the // same format as required by PubSub's // [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest), // for example `projects/PROJECT_ID/topics/TOPIC_ID`. // // The topic must be in the same project as the Cloud Scheduler job. - string topic_name = 1; + string topic_name = 1 [(google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + }]; // The message payload for PubsubMessage. // @@ -315,8 +314,8 @@ enum HttpMethod { // Contains information needed for generating an // [OAuth token](https://developers.google.com/identity/protocols/OAuth2). -// This type of authorization should be used when sending requests to a GCP -// endpoint. +// This type of authorization should generally only be used when calling Google +// APIs hosted on *.googleapis.com. message OAuthToken { // [Service account email](https://cloud.google.com/iam/docs/service-accounts) // to be used for generating OAuth token. 
@@ -332,9 +331,10 @@ message OAuthToken { // Contains information needed for generating an // [OpenID Connect -// token](https://developers.google.com/identity/protocols/OpenIDConnect). This -// type of authorization should be used when sending requests to third party -// endpoints or Cloud Run. +// token](https://developers.google.com/identity/protocols/OpenIDConnect). +// This type of authorization can be used for many scenarios, including +// calling Cloud Run, or endpoints where you intend to validate the token +// yourself. message OidcToken { // [Service account email](https://cloud.google.com/iam/docs/service-accounts) // to be used for generating OIDC token. @@ -346,3 +346,11 @@ message OidcToken { // specified in target will be used. string audience = 2; } + +// The Pub/Sub Topic resource definition is in google/cloud/pubsub/v1/, +// but we do not import that proto directly; therefore, we redefine the +// pattern here. +option (google.api.resource_definition) = { + type: "pubsub.googleapis.com/Topic" + pattern: "projects/{project}/topics/{topic}" +}; diff --git a/scheduler/google/cloud/scheduler_v1/proto/target_pb2.py b/scheduler/google/cloud/scheduler_v1/proto/target_pb2.py index c6cb1357681c..6d25c3da2a4b 100644 --- a/scheduler/google/cloud/scheduler_v1/proto/target_pb2.py +++ b/scheduler/google/cloud/scheduler_v1/proto/target_pb2.py @@ -16,8 +16,8 @@ _sym_db = _symbol_database.Default() +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -25,14 +25,14 @@ package="google.cloud.scheduler.v1", syntax="proto3", serialized_options=_b( - "\n\035com.google.cloud.scheduler.v1B\013TargetProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler" + 
"\n\035com.google.cloud.scheduler.v1B\013TargetProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\352A@\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}" ), serialized_pb=_b( - '\n,google/cloud/scheduler_v1/proto/target.proto\x12\x19google.cloud.scheduler.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto"\xea\x02\n\nHttpTarget\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12:\n\x0bhttp_method\x18\x02 \x01(\x0e\x32%.google.cloud.scheduler.v1.HttpMethod\x12\x43\n\x07headers\x18\x03 \x03(\x0b\x32\x32.google.cloud.scheduler.v1.HttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12<\n\x0boauth_token\x18\x05 \x01(\x0b\x32%.google.cloud.scheduler.v1.OAuthTokenH\x00\x12:\n\noidc_token\x18\x06 \x01(\x0b\x32$.google.cloud.scheduler.v1.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xbc\x02\n\x13\x41ppEngineHttpTarget\x12:\n\x0bhttp_method\x18\x01 \x01(\x0e\x32%.google.cloud.scheduler.v1.HttpMethod\x12G\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32+.google.cloud.scheduler.v1.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12L\n\x07headers\x18\x04 \x03(\x0b\x32;.google.cloud.scheduler.v1.AppEngineHttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb0\x01\n\x0cPubsubTarget\x12\x12\n\ntopic_name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12K\n\nattributes\x18\x04 \x03(\x0b\x32\x37.google.cloud.scheduler.v1.PubsubTarget.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 
\x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42r\n\x1d\x63om.google.cloud.scheduler.v1B\x0bTargetProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;schedulerb\x06proto3' + '\n,google/cloud/scheduler_v1/proto/target.proto\x12\x19google.cloud.scheduler.v1\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto"\xea\x02\n\nHttpTarget\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12:\n\x0bhttp_method\x18\x02 \x01(\x0e\x32%.google.cloud.scheduler.v1.HttpMethod\x12\x43\n\x07headers\x18\x03 \x03(\x0b\x32\x32.google.cloud.scheduler.v1.HttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12<\n\x0boauth_token\x18\x05 \x01(\x0b\x32%.google.cloud.scheduler.v1.OAuthTokenH\x00\x12:\n\noidc_token\x18\x06 \x01(\x0b\x32$.google.cloud.scheduler.v1.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xbc\x02\n\x13\x41ppEngineHttpTarget\x12:\n\x0bhttp_method\x18\x01 \x01(\x0e\x32%.google.cloud.scheduler.v1.HttpMethod\x12G\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32+.google.cloud.scheduler.v1.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12L\n\x07headers\x18\x04 \x03(\x0b\x32;.google.cloud.scheduler.v1.AppEngineHttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd2\x01\n\x0cPubsubTarget\x12\x34\n\ntopic_name\x18\x01 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x0c\n\x04\x64\x61ta\x18\x03 
\x01(\x0c\x12K\n\nattributes\x18\x04 \x03(\x0b\x32\x37.google.cloud.scheduler.v1.PubsubTarget.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\xb5\x01\n\x1d\x63om.google.cloud.scheduler.v1B\x0bTargetProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1;scheduler\xea\x41@\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}b\x06proto3' ), dependencies=[ + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_any__pb2.DESCRIPTOR, ], ) @@ -73,8 +73,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1203, - serialized_end=1318, + serialized_start=1237, + serialized_end=1352, ) _sym_db.RegisterEnumDescriptor(_HTTPMETHOD) @@ -501,8 +501,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=944, - serialized_end=993, + serialized_start=978, + serialized_end=1027, ) _PUBSUBTARGET = _descriptor.Descriptor( @@ -527,7 +527,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A\035\n\033pubsub.googleapis.com/Topic"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -576,7 +576,7 @@ extension_ranges=[], oneofs=[], serialized_start=817, - serialized_end=993, + 
serialized_end=1027, ) @@ -668,8 +668,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=995, - serialized_end=1079, + serialized_start=1029, + serialized_end=1113, ) @@ -725,8 +725,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1081, - serialized_end=1139, + serialized_start=1115, + serialized_end=1173, ) @@ -782,8 +782,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1141, - serialized_end=1201, + serialized_start=1175, + serialized_end=1235, ) _HTTPTARGET_HEADERSENTRY.containing_type = _HTTPTARGET @@ -848,7 +848,7 @@ Attributes: uri: - Required. The full URI path that the request will be sent to. + Required. The full URI path that the request will be sent to. This string must begin with either "http://" or "https://". Some examples of valid values for [uri][google.cloud.scheduler.v1.HttpTarget.uri] are: @@ -887,14 +887,16 @@ If specified, an `OAuth token `__ will be generated and attached as an ``Authorization`` header - in the HTTP request. This type of authorization should be - used when sending requests to a GCP endpoint. + in the HTTP request. This type of authorization should + generally only be used when calling Google APIs hosted on + \*.googleapis.com. oidc_token: If specified, an `OIDC `__ token will be generated and attached as an ``Authorization`` header in the HTTP request. - This type of authorization should be used when sending - requests to third party endpoints or Cloud Run. + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend to + validate the token yourself. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1.HttpTarget) ), @@ -999,7 +1001,7 @@ Attributes: topic_name: - Required. The name of the Cloud Pub/Sub topic to which + Required. The name of the Cloud Pub/Sub topic to which messages will be published when a job is delivered. 
The topic name must be in the same format as required by PubSub's `PublishRequest.name `__. This - type of authorization should be used when sending requests to a GCP - endpoint. + type of authorization should generally only be used when calling Google + APIs hosted on \*.googleapis.com. Attributes: @@ -1153,8 +1155,9 @@ __module__="google.cloud.scheduler_v1.proto.target_pb2", __doc__="""Contains information needed for generating an `OpenID Connect token `__. - This type of authorization should be used when sending requests to third - party endpoints or Cloud Run. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the token + yourself. Attributes: @@ -1178,4 +1181,5 @@ _HTTPTARGET_HEADERSENTRY._options = None _APPENGINEHTTPTARGET_HEADERSENTRY._options = None _PUBSUBTARGET_ATTRIBUTESENTRY._options = None +_PUBSUBTARGET.fields_by_name["topic_name"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client.py b/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client.py index 50879081907b..446baffa4c41 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client.py +++ b/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client.py @@ -100,13 +100,6 @@ def location_path(cls, project, location): location=location, ) - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - def __init__( self, transport=None, @@ -253,9 +246,7 @@ def list_jobs( ... pass Args: - parent (str): Required. - - The location name. For example: + parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. page_size (int): The maximum number of resources contained in the underlying API response. 
If page streaming is performed per- @@ -344,9 +335,7 @@ def get_job( >>> response = client.get_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -420,16 +409,12 @@ def create_job( >>> response = client.create_job(parent, job) Args: - parent (str): Required. - - The location name. For example: + parent (str): Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. - job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. - - The job to add. The user can optionally specify a name for the job in - ``name``. ``name`` cannot be the same as an existing job. If a name is - not specified then the system will generate a random unique name that - will be returned (``name``) in the response. + job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. The job to add. The user can optionally specify a name for the + job in ``name``. ``name`` cannot be the same as an existing job. If a + name is not specified then the system will generate a random unique name + that will be returned (``name``) in the response. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.scheduler_v1beta1.types.Job` @@ -511,9 +496,7 @@ def update_job( >>> response = client.update_job(job) Args: - job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. - - The new job properties. ``name`` must be specified. + job (Union[dict, ~google.cloud.scheduler_v1beta1.types.Job]): Required. The new job properties. ``name`` must be specified. Output only fields cannot be modified using UpdateJob. Any value specified for an output only field will be ignored. 
@@ -592,9 +575,7 @@ def delete_job( >>> client.delete_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -666,9 +647,7 @@ def pause_job( >>> response = client.pause_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -743,9 +722,7 @@ def resume_job( >>> response = client.resume_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -818,9 +795,7 @@ def run_job( >>> response = client.run_job(name) Args: - name (str): Required. - - The job name. For example: + name (str): Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will diff --git a/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client_config.py b/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client_config.py index d972bce85f76..f2dce757b3ec 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client_config.py +++ b/scheduler/google/cloud/scheduler_v1beta1/gapic/cloud_scheduler_client_config.py @@ -18,42 +18,42 @@ }, "methods": { "ListJobs": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "GetJob": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "idempotent", "retry_params_name": "default", }, "CreateJob": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateJob": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "DeleteJob": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "PauseJob": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ResumeJob": { - "timeout_millis": 10000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "RunJob": { - "timeout_millis": 10000, + "timeout_millis": 60000, "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto b/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto index 4c1d9661839e..4f86b7a56218 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto +++ 
b/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.scheduler.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/cloud/scheduler/v1beta1/job.proto"; import "google/protobuf/empty.proto"; @@ -32,11 +34,15 @@ option objc_class_prefix = "SCHEDULER"; // The Cloud Scheduler API allows external entities to reliably // schedule asynchronous jobs. service CloudScheduler { + option (google.api.default_host) = "cloudscheduler.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Lists jobs. rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { option (google.api.http) = { get: "/v1beta1/{parent=projects/*/locations/*}/jobs" }; + option (google.api.method_signature) = "parent"; } // Gets a job. @@ -44,6 +50,7 @@ service CloudScheduler { option (google.api.http) = { get: "/v1beta1/{name=projects/*/locations/*/jobs/*}" }; + option (google.api.method_signature) = "name"; } // Creates a job. @@ -52,6 +59,7 @@ service CloudScheduler { post: "/v1beta1/{parent=projects/*/locations/*}/jobs" body: "job" }; + option (google.api.method_signature) = "parent,job"; } // Updates a job. @@ -68,6 +76,7 @@ service CloudScheduler { patch: "/v1beta1/{job.name=projects/*/locations/*/jobs/*}" body: "job" }; + option (google.api.method_signature) = "job,update_mask"; } // Deletes a job. @@ -75,6 +84,7 @@ service CloudScheduler { option (google.api.http) = { delete: "/v1beta1/{name=projects/*/locations/*/jobs/*}" }; + option (google.api.method_signature) = "name"; } // Pauses a job. @@ -89,6 +99,7 @@ service CloudScheduler { post: "/v1beta1/{name=projects/*/locations/*/jobs/*}:pause" body: "*" }; + option (google.api.method_signature) = "name"; } // Resume a job. 
@@ -102,6 +113,7 @@ service CloudScheduler { post: "/v1beta1/{name=projects/*/locations/*/jobs/*}:resume" body: "*" }; + option (google.api.method_signature) = "name"; } // Forces a job to run now. @@ -113,16 +125,20 @@ service CloudScheduler { post: "/v1beta1/{name=projects/*/locations/*/jobs/*}:run" body: "*" }; + option (google.api.method_signature) = "name"; } } // Request message for listing jobs using [ListJobs][google.cloud.scheduler.v1beta1.CloudScheduler.ListJobs]. message ListJobsRequest { - // Required. - // - // The location name. For example: + // Required. The location name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudscheduler.googleapis.com/Job" + } + ]; // Requested page size. // @@ -159,40 +175,42 @@ message ListJobsResponse { // Request message for [GetJob][google.cloud.scheduler.v1beta1.CloudScheduler.GetJob]. message GetJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [CreateJob][google.cloud.scheduler.v1beta1.CloudScheduler.CreateJob]. message CreateJobRequest { - // Required. - // - // The location name. For example: + // Required. The location name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID`. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "cloudscheduler.googleapis.com/Job" + } + ]; - // Required. - // - // The job to add. The user can optionally specify a name for the + // Required. The job to add. 
The user can optionally specify a name for the // job in [name][google.cloud.scheduler.v1beta1.Job.name]. [name][google.cloud.scheduler.v1beta1.Job.name] cannot be the same as an // existing job. If a name is not specified then the system will // generate a random unique name that will be returned // ([name][google.cloud.scheduler.v1beta1.Job.name]) in the response. - Job job = 2; + Job job = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for [UpdateJob][google.cloud.scheduler.v1beta1.CloudScheduler.UpdateJob]. message UpdateJobRequest { - // Required. - // - // The new job properties. [name][google.cloud.scheduler.v1beta1.Job.name] must be specified. + // Required. The new job properties. [name][google.cloud.scheduler.v1beta1.Job.name] must be specified. // // Output only fields cannot be modified using UpdateJob. // Any value specified for an output only field will be ignored. - Job job = 1; + Job job = 1 [(google.api.field_behavior) = REQUIRED]; // A mask used to specify which fields of the job are being updated. google.protobuf.FieldMask update_mask = 2; @@ -201,37 +219,49 @@ message UpdateJobRequest { // Request message for deleting a job using // [DeleteJob][google.cloud.scheduler.v1beta1.CloudScheduler.DeleteJob]. message DeleteJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [PauseJob][google.cloud.scheduler.v1beta1.CloudScheduler.PauseJob]. message PauseJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. 
- string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for [ResumeJob][google.cloud.scheduler.v1beta1.CloudScheduler.ResumeJob]. message ResumeJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } // Request message for forcing a job to run now using // [RunJob][google.cloud.scheduler.v1beta1.CloudScheduler.RunJob]. message RunJobRequest { - // Required. - // - // The job name. For example: + // Required. The job name. For example: // `projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID`. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudscheduler.googleapis.com/Job" + } + ]; } diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler_pb2.py b/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler_pb2.py index f85e45704dc3..3ce738ced993 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler_pb2.py +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/cloudscheduler_pb2.py @@ -16,6 +16,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.scheduler_v1beta1.proto import ( job_pb2 as google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2, @@ -32,10 +34,12 @@ 
'\n"com.google.cloud.scheduler.v1beta1B\016SchedulerProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\242\002\tSCHEDULER' ), serialized_pb=_b( - '\n9google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a.google/cloud/scheduler_v1beta1/proto/job.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"H\n\x0fListJobsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"^\n\x10ListJobsResponse\x12\x31\n\x04jobs\x18\x01 \x03(\x0b\x32#.google.cloud.scheduler.v1beta1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x1d\n\rGetJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"T\n\x10\x43reateJobRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x30\n\x03job\x18\x02 \x01(\x0b\x32#.google.cloud.scheduler.v1beta1.Job"u\n\x10UpdateJobRequest\x12\x30\n\x03job\x18\x01 \x01(\x0b\x32#.google.cloud.scheduler.v1beta1.Job\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask" \n\x10\x44\x65leteJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1f\n\x0fPauseJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t" \n\x10ResumeJobRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\x1d\n\rRunJobRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t2\x88\n\n\x0e\x43loudScheduler\x12\xa4\x01\n\x08ListJobs\x12/.google.cloud.scheduler.v1beta1.ListJobsRequest\x1a\x30.google.cloud.scheduler.v1beta1.ListJobsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{parent=projects/*/locations/*}/jobs\x12\x93\x01\n\x06GetJob\x12-.google.cloud.scheduler.v1beta1.GetJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"5\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{name=projects/*/locations/*/jobs/*}\x12\x9e\x01\n\tCreateJob\x12\x30.google.cloud.scheduler.v1beta1.CreateJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job":\x82\xd3\xe4\x93\x02\x34"-/v1beta1/{parent=projects/*/locations/*}/jobs:\x03job\x12\xa2\x01\n\tUpdateJob\x12\x30.google.cloud.scheduler.v1beta1.UpdateJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job">\x82\xd3\xe4\x93\x02\x38\x32\x31/v1beta1/{job.name=projects/*/locations/*/jobs/*}:\x03job\x12\x8c\x01\n\tDeleteJob\x12\x30.google.cloud.scheduler.v1beta1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1beta1/{name=projects/*/locations/*/jobs/*}\x12\xa0\x01\n\x08PauseJob\x12/.google.cloud.scheduler.v1beta1.PauseJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job">\x82\xd3\xe4\x93\x02\x38"3/v1beta1/{name=projects/*/locations/*/jobs/*}:pause:\x01*\x12\xa3\x01\n\tResumeJob\x12\x30.google.cloud.scheduler.v1beta1.ResumeJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"?\x82\xd3\xe4\x93\x02\x39"4/v1beta1/{name=projects/*/locations/*/jobs/*}:resume:\x01*\x12\x9a\x01\n\x06RunJob\x12-.google.cloud.scheduler.v1beta1.RunJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"<\x82\xd3\xe4\x93\x02\x36"1/v1beta1/{name=projects/*/locations/*/jobs/*}:run:\x01*B\x8b\x01\n"com.google.cloud.scheduler.v1beta1B\x0eSchedulerProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\xa2\x02\tSCHEDULERb\x06proto3' + 
'\n9google/cloud/scheduler_v1beta1/proto/cloudscheduler.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a.google/cloud/scheduler_v1beta1/proto/job.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"s\n\x0fListJobsRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!cloudscheduler.googleapis.com/Job\x12\x11\n\tpage_size\x18\x05 \x01(\x05\x12\x12\n\npage_token\x18\x06 \x01(\t"^\n\x10ListJobsResponse\x12\x31\n\x04jobs\x18\x01 \x03(\x0b\x32#.google.cloud.scheduler.v1beta1.Job\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\rGetJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"\x84\x01\n\x10\x43reateJobRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!cloudscheduler.googleapis.com/Job\x12\x35\n\x03job\x18\x02 \x01(\x0b\x32#.google.cloud.scheduler.v1beta1.JobB\x03\xe0\x41\x02"z\n\x10UpdateJobRequest\x12\x35\n\x03job\x18\x01 \x01(\x0b\x32#.google.cloud.scheduler.v1beta1.JobB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x10\x44\x65leteJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"J\n\x0fPauseJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"K\n\x10ResumeJobRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job"H\n\rRunJobRequest\x12\x37\n\x04name\x18\x01 
\x01(\tB)\xe0\x41\x02\xfa\x41#\n!cloudscheduler.googleapis.com/Job2\xa6\x0b\n\x0e\x43loudScheduler\x12\xad\x01\n\x08ListJobs\x12/.google.cloud.scheduler.v1beta1.ListJobsRequest\x1a\x30.google.cloud.scheduler.v1beta1.ListJobsResponse">\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{parent=projects/*/locations/*}/jobs\xda\x41\x06parent\x12\x9a\x01\n\x06GetJob\x12-.google.cloud.scheduler.v1beta1.GetJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"<\x82\xd3\xe4\x93\x02/\x12-/v1beta1/{name=projects/*/locations/*/jobs/*}\xda\x41\x04name\x12\xab\x01\n\tCreateJob\x12\x30.google.cloud.scheduler.v1beta1.CreateJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"G\x82\xd3\xe4\x93\x02\x34"-/v1beta1/{parent=projects/*/locations/*}/jobs:\x03job\xda\x41\nparent,job\x12\xb4\x01\n\tUpdateJob\x12\x30.google.cloud.scheduler.v1beta1.UpdateJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"P\x82\xd3\xe4\x93\x02\x38\x32\x31/v1beta1/{job.name=projects/*/locations/*/jobs/*}:\x03job\xda\x41\x0fjob,update_mask\x12\x93\x01\n\tDeleteJob\x12\x30.google.cloud.scheduler.v1beta1.DeleteJobRequest\x1a\x16.google.protobuf.Empty"<\x82\xd3\xe4\x93\x02/*-/v1beta1/{name=projects/*/locations/*/jobs/*}\xda\x41\x04name\x12\xa7\x01\n\x08PauseJob\x12/.google.cloud.scheduler.v1beta1.PauseJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"E\x82\xd3\xe4\x93\x02\x38"3/v1beta1/{name=projects/*/locations/*/jobs/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\tResumeJob\x12\x30.google.cloud.scheduler.v1beta1.ResumeJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"F\x82\xd3\xe4\x93\x02\x39"4/v1beta1/{name=projects/*/locations/*/jobs/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x06RunJob\x12-.google.cloud.scheduler.v1beta1.RunJobRequest\x1a#.google.cloud.scheduler.v1beta1.Job"C\x82\xd3\xe4\x93\x02\x36"1/v1beta1/{name=projects/*/locations/*/jobs/*}:run:\x01*\xda\x41\x04name\x1aQ\xca\x41\x1d\x63loudscheduler.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8b\x01\n"com.google.cloud.scheduler.v1beta1B\x0eSchedu
lerProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\xa2\x02\tSCHEDULERb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, @@ -66,7 +70,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\022!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -114,8 +120,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=261, - serialized_end=333, + serialized_start=319, + serialized_end=434, ) @@ -171,8 +177,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=335, - serialized_end=429, + serialized_start=436, + serialized_end=530, ) @@ -198,7 +204,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -210,8 +218,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=431, - serialized_end=460, + serialized_start=532, + serialized_end=604, ) @@ -237,7 +245,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\022!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -255,7 +265,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -267,8 +277,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=462, - serialized_end=546, + serialized_start=607, + serialized_end=739, ) @@ -294,7 
+304,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -324,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=548, - serialized_end=665, + serialized_start=741, + serialized_end=863, ) @@ -351,7 +361,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -363,8 +375,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=667, - serialized_end=699, + serialized_start=865, + serialized_end=940, ) @@ -390,7 +402,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -402,8 +416,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=701, - serialized_end=732, + serialized_start=942, + serialized_end=1016, ) @@ -429,7 +443,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -441,8 +457,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=734, - serialized_end=766, + serialized_start=1018, + serialized_end=1093, ) @@ -468,7 +484,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!cloudscheduler.googleapis.com/Job" + ), file=DESCRIPTOR, ) ], @@ -480,8 +498,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=768, - serialized_end=797, + serialized_start=1095, + serialized_end=1167, ) _LISTJOBSRESPONSE.fields_by_name[ @@ -519,7 +537,7 @@ Attributes: parent: - Required. The location name. 
For example: + Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. page_size: Requested page size. The maximum page size is 500. If @@ -582,7 +600,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.GetJobRequest) @@ -602,10 +620,10 @@ Attributes: parent: - Required. The location name. For example: + Required. The location name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID``. job: - Required. The job to add. The user can optionally specify a + Required. The job to add. The user can optionally specify a name for the job in [name][google.cloud.scheduler.v1beta1.Job.name]. [name][google.cloud.scheduler.v1beta1.Job.name] cannot be the @@ -631,7 +649,7 @@ Attributes: job: - Required. The new job properties. + Required. The new job properties. [name][google.cloud.scheduler.v1beta1.Job.name] must be specified. Output only fields cannot be modified using UpdateJob. Any value specified for an output only field will @@ -657,7 +675,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.DeleteJobRequest) @@ -677,7 +695,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.PauseJobRequest) @@ -697,7 +715,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. 
""", # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.ResumeJobRequest) @@ -717,7 +735,7 @@ Attributes: name: - Required. The job name. For example: + Required. The job name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/jobs/JOB_ID``. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.RunJobRequest) @@ -727,15 +745,26 @@ DESCRIPTOR._options = None +_LISTJOBSREQUEST.fields_by_name["parent"]._options = None +_GETJOBREQUEST.fields_by_name["name"]._options = None +_CREATEJOBREQUEST.fields_by_name["parent"]._options = None +_CREATEJOBREQUEST.fields_by_name["job"]._options = None +_UPDATEJOBREQUEST.fields_by_name["job"]._options = None +_DELETEJOBREQUEST.fields_by_name["name"]._options = None +_PAUSEJOBREQUEST.fields_by_name["name"]._options = None +_RESUMEJOBREQUEST.fields_by_name["name"]._options = None +_RUNJOBREQUEST.fields_by_name["name"]._options = None _CLOUDSCHEDULER = _descriptor.ServiceDescriptor( name="CloudScheduler", full_name="google.cloud.scheduler.v1beta1.CloudScheduler", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=800, - serialized_end=2088, + serialized_options=_b( + "\312A\035cloudscheduler.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=1170, + serialized_end=2616, methods=[ _descriptor.MethodDescriptor( name="ListJobs", @@ -745,7 +774,7 @@ input_type=_LISTJOBSREQUEST, output_type=_LISTJOBSRESPONSE, serialized_options=_b( - "\202\323\344\223\002/\022-/v1beta1/{parent=projects/*/locations/*}/jobs" + "\202\323\344\223\002/\022-/v1beta1/{parent=projects/*/locations/*}/jobs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -756,7 +785,7 @@ input_type=_GETJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - "\202\323\344\223\002/\022-/v1beta1/{name=projects/*/locations/*/jobs/*}" + 
"\202\323\344\223\002/\022-/v1beta1/{name=projects/*/locations/*/jobs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -767,7 +796,7 @@ input_type=_CREATEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0024"-/v1beta1/{parent=projects/*/locations/*}/jobs:\003job' + '\202\323\344\223\0024"-/v1beta1/{parent=projects/*/locations/*}/jobs:\003job\332A\nparent,job' ), ), _descriptor.MethodDescriptor( @@ -778,7 +807,7 @@ input_type=_UPDATEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - "\202\323\344\223\002821/v1beta1/{job.name=projects/*/locations/*/jobs/*}:\003job" + "\202\323\344\223\002821/v1beta1/{job.name=projects/*/locations/*/jobs/*}:\003job\332A\017job,update_mask" ), ), _descriptor.MethodDescriptor( @@ -789,7 +818,7 @@ input_type=_DELETEJOBREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002/*-/v1beta1/{name=projects/*/locations/*/jobs/*}" + "\202\323\344\223\002/*-/v1beta1/{name=projects/*/locations/*/jobs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -800,7 +829,7 @@ input_type=_PAUSEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0028"3/v1beta1/{name=projects/*/locations/*/jobs/*}:pause:\001*' + '\202\323\344\223\0028"3/v1beta1/{name=projects/*/locations/*/jobs/*}:pause:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -811,7 +840,7 @@ input_type=_RESUMEJOBREQUEST, output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0029"4/v1beta1/{name=projects/*/locations/*/jobs/*}:resume:\001*' + '\202\323\344\223\0029"4/v1beta1/{name=projects/*/locations/*/jobs/*}:resume:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -822,7 +851,7 @@ input_type=_RUNJOBREQUEST, 
output_type=google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_job__pb2._JOB, serialized_options=_b( - '\202\323\344\223\0026"1/v1beta1/{name=projects/*/locations/*/jobs/*}:run:\001*' + '\202\323\344\223\0026"1/v1beta1/{name=projects/*/locations/*/jobs/*}:run:\001*\332A\004name' ), ), ], diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/job.proto b/scheduler/google/cloud/scheduler_v1beta1/proto/job.proto index ddf910b03384..ddfda31eddc2 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/job.proto +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/job.proto @@ -17,12 +17,12 @@ syntax = "proto3"; package google.cloud.scheduler.v1beta1; -import "google/api/annotations.proto"; import "google/api/resource.proto"; import "google/cloud/scheduler/v1beta1/target.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; import "google/rpc/status.proto"; +import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler"; option java_multiple_files = true; @@ -32,6 +32,11 @@ option java_package = "com.google.cloud.scheduler.v1beta1"; // Configuration for a job. // The maximum allowed size for a job is 100KB. message Job { + option (google.api.resource) = { + type: "cloudscheduler.googleapis.com/Job" + pattern: "projects/{project}/locations/{location}/jobs/{job}" + }; + // State of the job. enum State { // Unspecified state. 
diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/job_pb2.py b/scheduler/google/cloud/scheduler_v1beta1/proto/job_pb2.py index 0536ae370635..980631b89ea8 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/job_pb2.py +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/job_pb2.py @@ -15,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.scheduler_v1beta1.proto import ( target_pb2 as google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_target__pb2, @@ -23,6 +22,7 @@ from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -33,15 +33,15 @@ '\n"com.google.cloud.scheduler.v1beta1B\010JobProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler' ), serialized_pb=_b( - '\n.google/cloud/scheduler_v1beta1/proto/job.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x31google/cloud/scheduler_v1beta1/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\x88\x06\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x45\n\rpubsub_target\x18\x04 \x01(\x0b\x32,.google.cloud.scheduler.v1beta1.PubsubTargetH\x00\x12U\n\x16\x61pp_engine_http_target\x18\x05 \x01(\x0b\x32\x33.google.cloud.scheduler.v1beta1.AppEngineHttpTargetH\x00\x12\x41\n\x0bhttp_target\x18\x06 \x01(\x0b\x32*.google.cloud.scheduler.v1beta1.HttpTargetH\x00\x12\x10\n\x08schedule\x18\x14 \x01(\t\x12\x11\n\ttime_zone\x18\x15 
\x01(\t\x12\x34\n\x10user_update_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x05state\x18\n \x01(\x0e\x32).google.cloud.scheduler.v1beta1.Job.State\x12"\n\x06status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\rschedule_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11last_attempt_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x0cretry_config\x18\x13 \x01(\x0b\x32+.google.cloud.scheduler.v1beta1.RetryConfig\x12\x33\n\x10\x61ttempt_deadline\x18\x16 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x12\x11\n\rUPDATE_FAILED\x10\x04\x42\x08\n\x06target"\xe2\x01\n\x0bRetryConfig\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14min_backoff_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14max_backoff_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05\x42y\n"com.google.cloud.scheduler.v1beta1B\x08JobProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;schedulerb\x06proto3' + '\n.google/cloud/scheduler_v1beta1/proto/job.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x19google/api/resource.proto\x1a\x31google/cloud/scheduler_v1beta1/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xe4\x06\n\x03Job\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x45\n\rpubsub_target\x18\x04 \x01(\x0b\x32,.google.cloud.scheduler.v1beta1.PubsubTargetH\x00\x12U\n\x16\x61pp_engine_http_target\x18\x05 \x01(\x0b\x32\x33.google.cloud.scheduler.v1beta1.AppEngineHttpTargetH\x00\x12\x41\n\x0bhttp_target\x18\x06 
\x01(\x0b\x32*.google.cloud.scheduler.v1beta1.HttpTargetH\x00\x12\x10\n\x08schedule\x18\x14 \x01(\t\x12\x11\n\ttime_zone\x18\x15 \x01(\t\x12\x34\n\x10user_update_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x05state\x18\n \x01(\x0e\x32).google.cloud.scheduler.v1beta1.Job.State\x12"\n\x06status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\rschedule_time\x18\x11 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x35\n\x11last_attempt_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x41\n\x0cretry_config\x18\x13 \x01(\x0b\x32+.google.cloud.scheduler.v1beta1.RetryConfig\x12\x33\n\x10\x61ttempt_deadline\x18\x16 \x01(\x0b\x32\x19.google.protobuf.Duration"X\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x12\x11\n\rUPDATE_FAILED\x10\x04:Z\xea\x41W\n!cloudscheduler.googleapis.com/Job\x12\x32projects/{project}/locations/{location}/jobs/{job}B\x08\n\x06target"\xe2\x01\n\x0bRetryConfig\x12\x13\n\x0bretry_count\x18\x01 \x01(\x05\x12\x35\n\x12max_retry_duration\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14min_backoff_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x37\n\x14max_backoff_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x05 \x01(\x05\x42y\n"com.google.cloud.scheduler.v1beta1B\x08JobProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;schedulerb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_scheduler__v1beta1_dot_proto_dot_target__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_rpc_dot_status__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -343,7 +343,9 @@ extensions=[], nested_types=[], enum_types=[_JOB_STATE], - serialized_options=None, + 
serialized_options=_b( + "\352AW\n!cloudscheduler.googleapis.com/Job\0222projects/{project}/locations/{location}/jobs/{job}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -357,7 +359,7 @@ ) ], serialized_start=281, - serialized_end=1057, + serialized_end=1149, ) @@ -467,8 +469,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1060, - serialized_end=1286, + serialized_start=1152, + serialized_end=1378, ) _JOB.fields_by_name[ @@ -703,4 +705,5 @@ DESCRIPTOR._options = None +_JOB._options = None # @@protoc_insertion_point(module_scope) diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/target.proto b/scheduler/google/cloud/scheduler_v1beta1/proto/target.proto index 3bb44a1fb85d..4b47e356768b 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/target.proto +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/target.proto @@ -17,8 +17,8 @@ syntax = "proto3"; package google.cloud.scheduler.v1beta1; +import "google/api/resource.proto"; import "google/api/annotations.proto"; -import "google/protobuf/any.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler"; option java_multiple_files = true; @@ -32,9 +32,7 @@ option java_package = "com.google.cloud.scheduler.v1beta1"; // constitutes a failed execution. For a redirected request, the response // returned by the redirected request is considered. message HttpTarget { - // Required. - // - // The full URI path that the request will be sent to. This string + // Required. The full URI path that the request will be sent to. This string // must begin with either "http://" or "https://". Some examples of // valid values for [uri][google.cloud.scheduler.v1beta1.HttpTarget.uri] are: // `http://acme.com` and `https://acme.com/sales:8080`. Cloud Scheduler will @@ -77,8 +75,8 @@ message HttpTarget { // will be generated and attached as an `Authorization` header in the HTTP // request. 
// - // This type of authorization should be used when sending requests to a GCP - // endpoint. + // This type of authorization should generally only be used when calling + // Google APIs hosted on *.googleapis.com. OAuthToken oauth_token = 5; // If specified, an @@ -86,8 +84,9 @@ message HttpTarget { // token will be generated and attached as an `Authorization` header in the // HTTP request. // - // This type of authorization should be used when sending requests to third - // party endpoints. + // This type of authorization can be used for many scenarios, including + // calling Cloud Run, or endpoints where you intend to validate the token + // yourself. OidcToken oidc_token = 6; } } @@ -162,16 +161,16 @@ message AppEngineHttpTarget { // Pub/Sub target. The job will be delivered by publishing a message to // the given Pub/Sub topic. message PubsubTarget { - // Required. - // - // The name of the Cloud Pub/Sub topic to which messages will + // Required. The name of the Cloud Pub/Sub topic to which messages will // be published when a job is delivered. The topic name must be in the // same format as required by PubSub's // [PublishRequest.name](https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#publishrequest), // for example `projects/PROJECT_ID/topics/TOPIC_ID`. // // The topic must be in the same project as the Cloud Scheduler job. - string topic_name = 1; + string topic_name = 1 [(google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + }]; // The message payload for PubsubMessage. // @@ -315,8 +314,8 @@ enum HttpMethod { // Contains information needed for generating an // [OAuth token](https://developers.google.com/identity/protocols/OAuth2). -// This type of authorization should be used when sending requests to a GCP -// endpoint. +// This type of authorization should generally only be used when calling Google +// APIs hosted on *.googleapis.com. 
message OAuthToken { // [Service account email](https://cloud.google.com/iam/docs/service-accounts) // to be used for generating OAuth token. @@ -332,9 +331,10 @@ message OAuthToken { // Contains information needed for generating an // [OpenID Connect -// token](https://developers.google.com/identity/protocols/OpenIDConnect). This -// type of authorization should be used when sending requests to third party -// endpoints. +// token](https://developers.google.com/identity/protocols/OpenIDConnect). +// This type of authorization can be used for many scenarios, including +// calling Cloud Run, or endpoints where you intend to validate the token +// yourself. message OidcToken { // [Service account email](https://cloud.google.com/iam/docs/service-accounts) // to be used for generating OIDC token. @@ -346,3 +346,11 @@ message OidcToken { // specified in target will be used. string audience = 2; } + +// The Pub/Sub Topic resource definition is in google/cloud/pubsub/v1/, +// but we do not import that proto directly; therefore, we redefine the +// pattern here. 
+option (google.api.resource_definition) = { + type: "pubsub.googleapis.com/Topic" + pattern: "projects/{project}/topics/{topic}" +}; diff --git a/scheduler/google/cloud/scheduler_v1beta1/proto/target_pb2.py b/scheduler/google/cloud/scheduler_v1beta1/proto/target_pb2.py index f44a902f973d..e1bb923a9507 100644 --- a/scheduler/google/cloud/scheduler_v1beta1/proto/target_pb2.py +++ b/scheduler/google/cloud/scheduler_v1beta1/proto/target_pb2.py @@ -16,8 +16,8 @@ _sym_db = _symbol_database.Default() +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -25,14 +25,14 @@ package="google.cloud.scheduler.v1beta1", syntax="proto3", serialized_options=_b( - '\n"com.google.cloud.scheduler.v1beta1B\013TargetProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler' + '\n"com.google.cloud.scheduler.v1beta1B\013TargetProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\352A@\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}' ), serialized_pb=_b( - '\n1google/cloud/scheduler_v1beta1/proto/target.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/protobuf/any.proto"\xfe\x02\n\nHttpTarget\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12?\n\x0bhttp_method\x18\x02 \x01(\x0e\x32*.google.cloud.scheduler.v1beta1.HttpMethod\x12H\n\x07headers\x18\x03 \x03(\x0b\x32\x37.google.cloud.scheduler.v1beta1.HttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x41\n\x0boauth_token\x18\x05 \x01(\x0b\x32*.google.cloud.scheduler.v1beta1.OAuthTokenH\x00\x12?\n\noidc_token\x18\x06 \x01(\x0b\x32).google.cloud.scheduler.v1beta1.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xcb\x02\n\x13\x41ppEngineHttpTarget\x12?\n\x0bhttp_method\x18\x01 \x01(\x0e\x32*.google.cloud.scheduler.v1beta1.HttpMethod\x12L\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\x30.google.cloud.scheduler.v1beta1.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12Q\n\x07headers\x18\x04 \x03(\x0b\x32@.google.cloud.scheduler.v1beta1.AppEngineHttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xb5\x01\n\x0cPubsubTarget\x12\x12\n\ntopic_name\x18\x01 \x01(\t\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12P\n\nattributes\x18\x04 \x03(\x0b\x32<.google.cloud.scheduler.v1beta1.PubsubTarget.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42|\n"com.google.cloud.scheduler.v1beta1B\x0bTargetProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;schedulerb\x06proto3' + '\n1google/cloud/scheduler_v1beta1/proto/target.proto\x12\x1egoogle.cloud.scheduler.v1beta1\x1a\x19google/api/resource.proto\x1a\x1cgoogle/api/annotations.proto"\xfe\x02\n\nHttpTarget\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12?\n\x0bhttp_method\x18\x02 
\x01(\x0e\x32*.google.cloud.scheduler.v1beta1.HttpMethod\x12H\n\x07headers\x18\x03 \x03(\x0b\x32\x37.google.cloud.scheduler.v1beta1.HttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x41\n\x0boauth_token\x18\x05 \x01(\x0b\x32*.google.cloud.scheduler.v1beta1.OAuthTokenH\x00\x12?\n\noidc_token\x18\x06 \x01(\x0b\x32).google.cloud.scheduler.v1beta1.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xcb\x02\n\x13\x41ppEngineHttpTarget\x12?\n\x0bhttp_method\x18\x01 \x01(\x0e\x32*.google.cloud.scheduler.v1beta1.HttpMethod\x12L\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\x30.google.cloud.scheduler.v1beta1.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12Q\n\x07headers\x18\x04 \x03(\x0b\x32@.google.cloud.scheduler.v1beta1.AppEngineHttpTarget.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd7\x01\n\x0cPubsubTarget\x12\x34\n\ntopic_name\x18\x01 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x0c\n\x04\x64\x61ta\x18\x03 \x01(\x0c\x12P\n\nattributes\x18\x04 \x03(\x0b\x32<.google.cloud.scheduler.v1beta1.PubsubTarget.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 \x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 
\x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\xbf\x01\n"com.google.cloud.scheduler.v1beta1B\x0bTargetProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/scheduler/v1beta1;scheduler\xea\x41@\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}b\x06proto3' ), dependencies=[ + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_any__pb2.DESCRIPTOR, ], ) @@ -73,8 +73,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1253, - serialized_end=1368, + serialized_start=1287, + serialized_end=1402, ) _sym_db.RegisterEnumDescriptor(_HTTPMETHOD) @@ -501,8 +501,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=994, - serialized_end=1043, + serialized_start=1028, + serialized_end=1077, ) _PUBSUBTARGET = _descriptor.Descriptor( @@ -527,7 +527,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A\035\n\033pubsub.googleapis.com/Topic"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -576,7 +576,7 @@ extension_ranges=[], oneofs=[], serialized_start=862, - serialized_end=1043, + serialized_end=1077, ) @@ -668,8 +668,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1045, - serialized_end=1129, + serialized_start=1079, + serialized_end=1163, ) @@ -725,8 +725,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1131, - serialized_end=1189, + serialized_start=1165, + serialized_end=1223, ) @@ -782,8 +782,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1191, - serialized_end=1251, + serialized_start=1225, + serialized_end=1285, ) _HTTPTARGET_HEADERSENTRY.containing_type = _HTTPTARGET @@ -848,7 +848,7 
@@ Attributes: uri: - Required. The full URI path that the request will be sent to. + Required. The full URI path that the request will be sent to. This string must begin with either "http://" or "https://". Some examples of valid values for [uri][google.cloud.scheduler.v1beta1.HttpTarget.uri] are: @@ -887,14 +887,16 @@ If specified, an `OAuth token `__ will be generated and attached as an ``Authorization`` header - in the HTTP request. This type of authorization should be - used when sending requests to a GCP endpoint. + in the HTTP request. This type of authorization should + generally only be used when calling Google APIs hosted on + \*.googleapis.com. oidc_token: If specified, an `OIDC `__ token will be generated and attached as an ``Authorization`` header in the HTTP request. - This type of authorization should be used when sending - requests to third party endpoints. + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend to + validate the token yourself. """, # @@protoc_insertion_point(class_scope:google.cloud.scheduler.v1beta1.HttpTarget) ), @@ -999,7 +1001,7 @@ Attributes: topic_name: - Required. The name of the Cloud Pub/Sub topic to which + Required. The name of the Cloud Pub/Sub topic to which messages will be published when a job is delivered. The topic name must be in the same format as required by PubSub's `PublishRequest.name `__. This - type of authorization should be used when sending requests to a GCP - endpoint. + type of authorization should generally only be used when calling Google + APIs hosted on \*.googleapis.com. Attributes: @@ -1150,8 +1152,9 @@ __module__="google.cloud.scheduler_v1beta1.proto.target_pb2", __doc__="""Contains information needed for generating an `OpenID Connect token `__. - This type of authorization should be used when sending requests to third - party endpoints. 
+ This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the token + yourself. Attributes: @@ -1175,4 +1178,5 @@ _HTTPTARGET_HEADERSENTRY._options = None _APPENGINEHTTPTARGET_HEADERSENTRY._options = None _PUBSUBTARGET_ATTRIBUTESENTRY._options = None +_PUBSUBTARGET.fields_by_name["topic_name"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/scheduler/synth.metadata b/scheduler/synth.metadata index 86c1219f85b7..797c82f3212b 100644 --- a/scheduler/synth.metadata +++ b/scheduler/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:36:15.641949Z", + "updateTime": "2019-10-23T12:31:17.538221Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.2", + "dockerImage": "googleapis/artman@sha256:3b8f7d9b4c206843ce08053474f5c64ae4d388ff7d995e68b59fb65edf73eeb9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "0d0dc5172f16c9815a5eda6e99408fb96282f608", + "internalRef": "276178557" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/securitycenter/google/cloud/securitycenter.py b/securitycenter/google/cloud/securitycenter.py index 7b5ed2b41912..e2b719caee36 100644 --- a/securitycenter/google/cloud/securitycenter.py +++ b/securitycenter/google/cloud/securitycenter.py @@ -22,4 +22,8 @@ from google.cloud.securitycenter_v1 import types -__all__ = ("enums", "types", "SecurityCenterClient") +__all__ = ( + "enums", + "types", + "SecurityCenterClient", +) diff --git a/securitycenter/google/cloud/securitycenter_v1/__init__.py b/securitycenter/google/cloud/securitycenter_v1/__init__.py index 
f8442f6bfc9b..090c03d10cca 100644 --- a/securitycenter/google/cloud/securitycenter_v1/__init__.py +++ b/securitycenter/google/cloud/securitycenter_v1/__init__.py @@ -27,4 +27,8 @@ class SecurityCenterClient(security_center_client.SecurityCenterClient): enums = enums -__all__ = ("enums", "types", "SecurityCenterClient") +__all__ = ( + "enums", + "types", + "SecurityCenterClient", +) diff --git a/securitycenter/google/cloud/securitycenter_v1/gapic/security_center_client.py b/securitycenter/google/cloud/securitycenter_v1/gapic/security_center_client.py index ccc7c2a93a53..eaf1e33ae381 100644 --- a/securitycenter/google/cloud/securitycenter_v1/gapic/security_center_client.py +++ b/securitycenter/google/cloud/securitycenter_v1/gapic/security_center_client.py @@ -56,7 +56,7 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-securitycenter" + "google-cloud-securitycenter", ).version @@ -132,7 +132,7 @@ def finding_security_marks_path(cls, organization, source, finding): def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -147,7 +147,7 @@ def organization_settings_path(cls, organization): def organization_sources_path(cls, organization): """Return a fully-qualified organization_sources string.""" return google.api_core.path_template.expand( - "organizations/{organization}/sources/-", organization=organization + "organizations/{organization}/sources/-", organization=organization, ) @classmethod @@ -246,12 +246,12 @@ def __init__( self.transport = transport else: self.transport = security_center_grpc_transport.SecurityCenterGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = 
google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -262,7 +262,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -334,7 +334,7 @@ def create_source( ) request = securitycenter_service_pb2.CreateSourceRequest( - parent=parent, source=source + parent=parent, source=source, ) if metadata is None: metadata = [] @@ -423,7 +423,7 @@ def create_finding( ) request = securitycenter_service_pb2.CreateFindingRequest( - parent=parent, finding_id=finding_id, finding=finding + parent=parent, finding_id=finding_id, finding=finding, ) if metadata is None: metadata = [] @@ -501,7 +501,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -572,7 +572,7 @@ def get_organization_settings( client_info=self._client_info, ) - request = securitycenter_service_pb2.GetOrganizationSettingsRequest(name=name) + request = securitycenter_service_pb2.GetOrganizationSettingsRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -642,7 +642,7 @@ def get_source( client_info=self._client_info, ) - request = securitycenter_service_pb2.GetSourceRequest(name=name) + request = securitycenter_service_pb2.GetSourceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1592,7 +1592,7 @@ def list_sources( ) request = securitycenter_service_pb2.ListSourcesRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -1688,7 
+1688,7 @@ def run_asset_discovery( client_info=self._client_info, ) - request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent) + request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1780,7 +1780,7 @@ def set_finding_state( ) request = securitycenter_service_pb2.SetFindingStateRequest( - name=name, state=state, start_time=start_time + name=name, state=state, start_time=start_time, ) if metadata is None: metadata = [] @@ -1862,7 +1862,7 @@ def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1941,7 +1941,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] @@ -2033,7 +2033,7 @@ def update_finding( ) request = securitycenter_service_pb2.UpdateFindingRequest( - finding=finding, update_mask=update_mask + finding=finding, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -2117,7 +2117,7 @@ def update_organization_settings( ) request = securitycenter_service_pb2.UpdateOrganizationSettingsRequest( - organization_settings=organization_settings, update_mask=update_mask + organization_settings=organization_settings, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -2201,7 +2201,7 @@ def update_source( ) request = securitycenter_service_pb2.UpdateSourceRequest( - source=source, update_mask=update_mask + source=source, update_mask=update_mask, ) if metadata is None: metadata = [] diff --git a/securitycenter/google/cloud/securitycenter_v1/gapic/transports/security_center_grpc_transport.py 
b/securitycenter/google/cloud/securitycenter_v1/gapic/transports/security_center_grpc_transport.py index f93386352719..1b6be666cee2 100644 --- a/securitycenter/google/cloud/securitycenter_v1/gapic/transports/security_center_grpc_transport.py +++ b/securitycenter/google/cloud/securitycenter_v1/gapic/transports/security_center_grpc_transport.py @@ -57,7 +57,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -78,7 +78,7 @@ def __init__( self._stubs = { "security_center_stub": securitycenter_service_pb2_grpc.SecurityCenterStub( channel - ) + ), } # Because this API includes a method that returns a diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/asset_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/asset_pb2.py index 68d334f060d7..ff176dabf050 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/asset_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/asset_pb2.py @@ -176,7 +176,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/finding_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/finding_pb2.py index 5b96096dee3b..f0f534134c40 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/finding_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/finding_pb2.py @@ -315,8 +315,8 @@ ), ], extensions=[], - nested_types=[_FINDING_SOURCEPROPERTIESENTRY], - enum_types=[_FINDING_STATE], + nested_types=[_FINDING_SOURCEPROPERTIESENTRY,], + enum_types=[_FINDING_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git 
a/securitycenter/google/cloud/securitycenter_v1/proto/organization_settings_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/organization_settings_pb2.py index 8e9189fbb162..7056f7b07520 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/organization_settings_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/organization_settings_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n@google/cloud/securitycenter_v1/proto/organization_settings.proto\x12\x1egoogle.cloud.securitycenter.v1\x1a\x1cgoogle/api/annotations.proto"\xaa\x03\n\x14OrganizationSettings\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16\x65nable_asset_discovery\x18\x02 \x01(\x08\x12i\n\x16\x61sset_discovery_config\x18\x03 \x01(\x0b\x32I.google.cloud.securitycenter.v1.OrganizationSettings.AssetDiscoveryConfig\x1a\xec\x01\n\x14\x41ssetDiscoveryConfig\x12\x13\n\x0bproject_ids\x18\x01 \x03(\t\x12o\n\x0einclusion_mode\x18\x02 \x01(\x0e\x32W.google.cloud.securitycenter.v1.OrganizationSettings.AssetDiscoveryConfig.InclusionMode"N\n\rInclusionMode\x12\x1e\n\x1aINCLUSION_MODE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINCLUDE_ONLY\x10\x01\x12\x0b\n\x07\x45XCLUDE\x10\x02J\x04\x08\x04\x10\x05J\x04\x08\x05\x10\x06\x42\xda\x01\n"com.google.cloud.securitycenter.v1P\x01ZLgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1;securitycenter\xaa\x02\x1eGoogle.Cloud.SecurityCenter.V1\xca\x02\x1eGoogle\\Cloud\\SecurityCenter\\V1\xea\x02!Google::Cloud::SecurityCenter::V1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) @@ -106,7 +106,7 @@ ], extensions=[], nested_types=[], - enum_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG_INCLUSIONMODE], + enum_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG_INCLUSIONMODE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -179,7 +179,7 @@ ), ], extensions=[], - nested_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG], + 
nested_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/run_asset_discovery_response_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/run_asset_discovery_response_pb2.py index 447d1c4ad733..44504bf5d0f3 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/run_asset_discovery_response_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/run_asset_discovery_response_pb2.py @@ -115,7 +115,7 @@ ], extensions=[], nested_types=[], - enum_types=[_RUNASSETDISCOVERYRESPONSE_STATE], + enum_types=[_RUNASSETDISCOVERYRESPONSE_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/security_marks_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/security_marks_pb2.py index ab891dada5a8..f06ae1bea6b2 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/security_marks_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/security_marks_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n9google/cloud/securitycenter_v1/proto/security_marks.proto\x12\x1egoogle.cloud.securitycenter.v1\x1a\x1cgoogle/api/annotations.proto"\x94\x01\n\rSecurityMarks\x12\x0c\n\x04name\x18\x01 \x01(\t\x12G\n\x05marks\x18\x02 \x03(\x0b\x32\x38.google.cloud.securitycenter.v1.SecurityMarks.MarksEntry\x1a,\n\nMarksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\xda\x01\n"com.google.cloud.securitycenter.v1P\x01ZLgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1;securitycenter\xaa\x02\x1eGoogle.Cloud.SecurityCenter.V1\xca\x02\x1eGoogle\\Cloud\\SecurityCenter\\V1\xea\x02!Google::Cloud::SecurityCenter::V1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) @@ -133,7 +133,7 @@ ), ], extensions=[], - 
nested_types=[_SECURITYMARKS_MARKSENTRY], + nested_types=[_SECURITYMARKS_MARKSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/securitycenter_service_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/securitycenter_service_pb2.py index 87d92ccf653a..ec734191b77b 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/securitycenter_service_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/securitycenter_service_pb2.py @@ -288,7 +288,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -327,7 +327,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -923,7 +923,7 @@ ), ], extensions=[], - nested_types=[_GROUPRESULT_PROPERTIESENTRY], + nested_types=[_GROUPRESULT_PROPERTIESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1278,7 +1278,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT_STATECHANGE], + enum_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT_STATECHANGE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1369,7 +1369,7 @@ ), ], extensions=[], - nested_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT], + nested_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1592,7 +1592,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LISTFINDINGSRESPONSE_LISTFINDINGSRESULT_STATECHANGE], + enum_types=[_LISTFINDINGSRESPONSE_LISTFINDINGSRESULT_STATECHANGE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1683,7 +1683,7 @@ ), ], extensions=[], - nested_types=[_LISTFINDINGSRESPONSE_LISTFINDINGSRESULT], + nested_types=[_LISTFINDINGSRESPONSE_LISTFINDINGSRESULT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1794,7 +1794,7 @@ extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/securitycenter/google/cloud/securitycenter_v1/proto/source_pb2.py b/securitycenter/google/cloud/securitycenter_v1/proto/source_pb2.py index b9e7fe1d5034..0e0363afce7e 100644 --- a/securitycenter/google/cloud/securitycenter_v1/proto/source_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1/proto/source_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( '\n1google/cloud/securitycenter_v1/proto/source.proto\x12\x1egoogle.cloud.securitycenter.v1\x1a\x1cgoogle/api/annotations.proto"G\n\x06Source\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\tJ\x04\x08\x04\x10\x05\x42\xda\x01\n"com.google.cloud.securitycenter.v1P\x01ZLgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1;securitycenter\xaa\x02\x1eGoogle.Cloud.SecurityCenter.V1\xca\x02\x1eGoogle\\Cloud\\SecurityCenter\\V1\xea\x02!Google::Cloud::SecurityCenter::V1b\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/__init__.py b/securitycenter/google/cloud/securitycenter_v1beta1/__init__.py index f15d206643a9..15ad4154ae86 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/__init__.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/__init__.py @@ -27,4 +27,8 @@ class SecurityCenterClient(security_center_client.SecurityCenterClient): enums = enums -__all__ = ("enums", "types", "SecurityCenterClient") +__all__ = ( + "enums", + "types", + "SecurityCenterClient", +) diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py b/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py index 7b26dffaf78a..1e0535b01237 100644 --- 
a/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/gapic/security_center_client.py @@ -55,7 +55,7 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-securitycenter" + "google-cloud-securitycenter", ).version @@ -122,7 +122,7 @@ def finding_security_marks_path(cls, organization, source, finding): def organization_path(cls, organization): """Return a fully-qualified organization string.""" return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization + "organizations/{organization}", organization=organization, ) @classmethod @@ -229,12 +229,12 @@ def __init__( self.transport = transport else: self.transport = security_center_grpc_transport.SecurityCenterGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -245,7 +245,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -317,7 +317,7 @@ def create_source( ) request = securitycenter_service_pb2.CreateSourceRequest( - parent=parent, source=source + parent=parent, source=source, ) if metadata is None: metadata = [] @@ -406,7 +406,7 @@ def create_finding( ) request = securitycenter_service_pb2.CreateFindingRequest( - parent=parent, finding_id=finding_id, finding=finding + parent=parent, finding_id=finding_id, finding=finding, ) if metadata is None: metadata = [] @@ -484,7 +484,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -555,7 +555,7 @@ def get_organization_settings( client_info=self._client_info, ) - request = securitycenter_service_pb2.GetOrganizationSettingsRequest(name=name) + request = securitycenter_service_pb2.GetOrganizationSettingsRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -625,7 +625,7 @@ def get_source( client_info=self._client_info, ) - request = securitycenter_service_pb2.GetSourceRequest(name=name) + request = securitycenter_service_pb2.GetSourceRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1428,7 +1428,7 @@ def list_sources( ) request = securitycenter_service_pb2.ListSourcesRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -1524,7 +1524,7 @@ def run_asset_discovery( client_info=self._client_info, ) - request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent) + request = securitycenter_service_pb2.RunAssetDiscoveryRequest(parent=parent,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1616,7 +1616,7 @@ def set_finding_state( ) request = securitycenter_service_pb2.SetFindingStateRequest( - name=name, state=state, start_time=start_time + name=name, state=state, start_time=start_time, ) if metadata is None: metadata = [] @@ -1698,7 +1698,7 @@ 
def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1777,7 +1777,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] @@ -1864,7 +1864,7 @@ def update_finding( ) request = securitycenter_service_pb2.UpdateFindingRequest( - finding=finding, update_mask=update_mask + finding=finding, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1946,7 +1946,7 @@ def update_organization_settings( ) request = securitycenter_service_pb2.UpdateOrganizationSettingsRequest( - organization_settings=organization_settings, update_mask=update_mask + organization_settings=organization_settings, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -2028,7 +2028,7 @@ def update_source( ) request = securitycenter_service_pb2.UpdateSourceRequest( - source=source, update_mask=update_mask + source=source, update_mask=update_mask, ) if metadata is None: metadata = [] diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/gapic/transports/security_center_grpc_transport.py b/securitycenter/google/cloud/securitycenter_v1beta1/gapic/transports/security_center_grpc_transport.py index 46419728cc2d..a7ea75051156 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/gapic/transports/security_center_grpc_transport.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/gapic/transports/security_center_grpc_transport.py @@ -57,7 +57,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." 
+ "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -78,7 +78,7 @@ def __init__( self._stubs = { "security_center_stub": securitycenter_service_pb2_grpc.SecurityCenterStub( channel - ) + ), } # Because this API includes a method that returns a diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/asset_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/asset_pb2.py index fc37a148ba90..82106ac488af 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/asset_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/asset_pb2.py @@ -325,7 +325,7 @@ ), ], extensions=[], - nested_types=[_ASSET_SECURITYCENTERPROPERTIES, _ASSET_RESOURCEPROPERTIESENTRY], + nested_types=[_ASSET_SECURITYCENTERPROPERTIES, _ASSET_RESOURCEPROPERTIESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/finding_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/finding_pb2.py index a85a20232890..0415351d44c6 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/finding_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/finding_pb2.py @@ -315,8 +315,8 @@ ), ], extensions=[], - nested_types=[_FINDING_SOURCEPROPERTIESENTRY], - enum_types=[_FINDING_STATE], + nested_types=[_FINDING_SOURCEPROPERTIESENTRY,], + enum_types=[_FINDING_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/organization_settings_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/organization_settings_pb2.py index e7c1588af71e..d8d1b3704faf 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/organization_settings_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/organization_settings_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( 
'\nEgoogle/cloud/securitycenter_v1beta1/proto/organization_settings.proto\x12#google.cloud.securitycenter.v1beta1\x1a\x1cgoogle/api/annotations.proto"\xa8\x03\n\x14OrganizationSettings\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1e\n\x16\x65nable_asset_discovery\x18\x02 \x01(\x08\x12n\n\x16\x61sset_discovery_config\x18\x03 \x01(\x0b\x32N.google.cloud.securitycenter.v1beta1.OrganizationSettings.AssetDiscoveryConfig\x1a\xf1\x01\n\x14\x41ssetDiscoveryConfig\x12\x13\n\x0bproject_ids\x18\x01 \x03(\t\x12t\n\x0einclusion_mode\x18\x02 \x01(\x0e\x32\\.google.cloud.securitycenter.v1beta1.OrganizationSettings.AssetDiscoveryConfig.InclusionMode"N\n\rInclusionMode\x12\x1e\n\x1aINCLUSION_MODE_UNSPECIFIED\x10\x00\x12\x10\n\x0cINCLUDE_ONLY\x10\x01\x12\x0b\n\x07\x45XCLUDE\x10\x02\x42~\n\'com.google.cloud.securitycenter.v1beta1P\x01ZQgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1;securitycenterb\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) @@ -106,7 +106,7 @@ ], extensions=[], nested_types=[], - enum_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG_INCLUSIONMODE], + enum_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG_INCLUSIONMODE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -179,7 +179,7 @@ ), ], extensions=[], - nested_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG], + nested_types=[_ORGANIZATIONSETTINGS_ASSETDISCOVERYCONFIG,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/security_marks_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/security_marks_pb2.py index dcee636baff8..8487bd9ae00d 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/security_marks_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/security_marks_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( 
"\n>google/cloud/securitycenter_v1beta1/proto/security_marks.proto\x12#google.cloud.securitycenter.v1beta1\x1a\x1cgoogle/api/annotations.proto\"\x99\x01\n\rSecurityMarks\x12\x0c\n\x04name\x18\x01 \x01(\t\x12L\n\x05marks\x18\x02 \x03(\x0b\x32=.google.cloud.securitycenter.v1beta1.SecurityMarks.MarksEntry\x1a,\n\nMarksEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42~\n'com.google.cloud.securitycenter.v1beta1P\x01ZQgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1;securitycenterb\x06proto3" ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) @@ -133,7 +133,7 @@ ), ], extensions=[], - nested_types=[_SECURITYMARKS_MARKSENTRY], + nested_types=[_SECURITYMARKS_MARKSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2.py index 42d309b63cdb..804cae1e5b39 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/securitycenter_service_pb2.py @@ -262,7 +262,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -301,7 +301,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -843,7 +843,7 @@ ), ], extensions=[], - nested_types=[_GROUPRESULT_PROPERTIESENTRY], + nested_types=[_GROUPRESULT_PROPERTIESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1198,7 +1198,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT_STATE], + enum_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT_STATE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1289,7 
+1289,7 @@ ), ], extensions=[], - nested_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT], + nested_types=[_LISTASSETSRESPONSE_LISTASSETSRESULT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1640,7 +1640,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], diff --git a/securitycenter/google/cloud/securitycenter_v1beta1/proto/source_pb2.py b/securitycenter/google/cloud/securitycenter_v1beta1/proto/source_pb2.py index 0fa2b9c2771d..5b155e82bf61 100644 --- a/securitycenter/google/cloud/securitycenter_v1beta1/proto/source_pb2.py +++ b/securitycenter/google/cloud/securitycenter_v1beta1/proto/source_pb2.py @@ -28,7 +28,7 @@ serialized_pb=_b( "\n6google/cloud/securitycenter_v1beta1/proto/source.proto\x12#google.cloud.securitycenter.v1beta1\x1a\x1cgoogle/api/annotations.proto\"A\n\x06Source\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\tB~\n'com.google.cloud.securitycenter.v1beta1P\x01ZQgoogle.golang.org/genproto/googleapis/cloud/securitycenter/v1beta1;securitycenterb\x06proto3" ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,], ) diff --git a/securitycenter/synth.metadata b/securitycenter/synth.metadata index 9530dd07871f..139d6ecd64ba 100644 --- a/securitycenter/synth.metadata +++ b/securitycenter/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:37:26.268094Z", + "updateTime": "2019-10-29T12:35:55.798976Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": 
"e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/spanner/CHANGELOG.md b/spanner/CHANGELOG.md index d217c95b3be7..97593703a647 100644 --- a/spanner/CHANGELOG.md +++ b/spanner/CHANGELOG.md @@ -4,6 +4,47 @@ [1]: https://pypi.org/project/google-cloud-spanner/#history +## 1.12.0 + +10-23-2019 19:09 PDT + + +### Implementation Changes +- Add `batch_create_session` calls to session pools. ([#9488](https://github.com/googleapis/google-cloud-python/pull/9488)) + +### New Features +- Add `client_options` to client constructor. ([#9151](https://github.com/googleapis/google-cloud-python/pull/9151)) + +### Internal / Testing Changes +- Harden 'test_reload_instance' systest against eventual consistency failures. ([#9394](https://github.com/googleapis/google-cloud-python/pull/9394)) +- Harden 'test_transaction_batch_update_w_syntax_error' systest. ([#9395](https://github.com/googleapis/google-cloud-python/pull/9395)) +- Propagate errors from 'Transaction.batch_update' in systest. ([#9393](https://github.com/googleapis/google-cloud-python/pull/9393)) + +## 1.11.0 + +10-15-2019 06:55 PDT + + +### Implementation Changes +- Adjust gRPC timeouts (via synth). ([#9330](https://github.com/googleapis/google-cloud-python/pull/9330)) +- Make `session_count` optional for `SpannerClient.batch_create_sessions` (via synth). ([#9280](https://github.com/googleapis/google-cloud-python/pull/9280)) +- Remove send / receive message size limit, update docstrings (via synth). ([#8968](https://github.com/googleapis/google-cloud-python/pull/8968)) + +### New Features +- Add `batch_create_sessions` method to generated client (via synth). 
([#9087](https://github.com/googleapis/google-cloud-python/pull/9087)) + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Remove references to old authentication credentials in docs. ([#9456](https://github.com/googleapis/google-cloud-python/pull/9456)) +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Fix `run_in_transaction` return value docs. ([#9264](https://github.com/googleapis/google-cloud-python/pull/9264)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) +- Add DML insert and update examples to README. ([#8698](https://github.com/googleapis/google-cloud-python/pull/8698)) +- Update intersphinx mapping for requests. ([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 1.10.0 07-24-2019 17:32 PDT diff --git a/spanner/google/cloud/spanner_v1/client.py b/spanner/google/cloud/spanner_v1/client.py index a6f3bd25f5e6..b35bf19f0796 100644 --- a/spanner/google/cloud/spanner_v1/client.py +++ b/spanner/google/cloud/spanner_v1/client.py @@ -93,11 +93,12 @@ class Client(ClientWithProject): attempt to determine from the environment. :type credentials: - :class:`OAuth2Credentials ` or + :class:`Credentials ` or :data:`NoneType ` - :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not provided, defaults to the Google - Application Default Credentials. + :param credentials: (Optional) The authorization credentials to attach to requests. + These credentials identify this application to the service. + If none are specified, the client will attempt to ascertain + the credentials from the environment. 
:type client_info: :class:`google.api_core.gapic_v1.client_info.ClientInfo` :param client_info: @@ -110,6 +111,10 @@ class Client(ClientWithProject): :param user_agent: (Deprecated) The user agent to be used with API request. Not used. + :type client_options: :class:`~google.api_core.client_options.ClientOptions` + or :class:`dict` + :param client_options: (Optional) Client options used to set user options + on the client. API Endpoint should be set through client_options. :raises: :class:`ValueError ` if both ``read_only`` and ``admin`` are :data:`True` @@ -124,7 +129,12 @@ class Client(ClientWithProject): """The scopes required for Google Cloud Spanner.""" def __init__( - self, project=None, credentials=None, client_info=_CLIENT_INFO, user_agent=None + self, + project=None, + credentials=None, + client_info=_CLIENT_INFO, + user_agent=None, + client_options=None, ): # NOTE: This API has no use for the _http argument, but sending it # will have no impact since the _http() @property only lazily @@ -133,6 +143,7 @@ def __init__( project=project, credentials=credentials, _http=None ) self._client_info = client_info + self._client_options = client_options if user_agent is not None: warnings.warn(_USER_AGENT_DEPRECATED, DeprecationWarning, stacklevel=2) @@ -143,7 +154,7 @@ def credentials(self): """Getter for client's credentials. :rtype: - :class:`OAuth2Credentials ` + :class:`Credentials ` :returns: The credentials stored on the client. 
""" return self._credentials @@ -172,7 +183,9 @@ def instance_admin_api(self): """Helper for session-related API calls.""" if self._instance_admin_api is None: self._instance_admin_api = InstanceAdminClient( - credentials=self.credentials, client_info=self._client_info + credentials=self.credentials, + client_info=self._client_info, + client_options=self._client_options, ) return self._instance_admin_api @@ -181,7 +194,9 @@ def database_admin_api(self): """Helper for session-related API calls.""" if self._database_admin_api is None: self._database_admin_api = DatabaseAdminClient( - credentials=self.credentials, client_info=self._client_info + credentials=self.credentials, + client_info=self._client_info, + client_options=self._client_options, ) return self._database_admin_api diff --git a/spanner/google/cloud/spanner_v1/database.py b/spanner/google/cloud/spanner_v1/database.py index f0f06dbbd637..f561ecd4fa9e 100644 --- a/spanner/google/cloud/spanner_v1/database.py +++ b/spanner/google/cloud/spanner_v1/database.py @@ -177,8 +177,11 @@ def spanner_api(self): if isinstance(credentials, google.auth.credentials.Scoped): credentials = credentials.with_scopes((SPANNER_DATA_SCOPE,)) client_info = self._instance._client._client_info + client_options = self._instance._client._client_options self._spanner_api = SpannerClient( - credentials=credentials, client_info=client_info + credentials=credentials, + client_info=client_info, + client_options=client_options, ) return self._spanner_api @@ -416,9 +419,10 @@ def run_in_transaction(self, func, *args, **kw): :param args: additional positional arguments to be passed to ``func``. :type kw: dict - :param kw: optional keyword arguments to be passed to ``func``. + :param kw: (Optional) keyword arguments to be passed to ``func``. If passed, "timeout_secs" will be removed and used to - override the default timeout. + override the default retry timeout which defines maximum timestamp + to continue retrying the transaction. 
:rtype: Any :returns: The return value of ``func``. diff --git a/spanner/google/cloud/spanner_v1/gapic/spanner_client_config.py b/spanner/google/cloud/spanner_v1/gapic/spanner_client_config.py index 0b4722fd8d06..333f72afe28c 100644 --- a/spanner/google/cloud/spanner_v1/gapic/spanner_client_config.py +++ b/spanner/google/cloud/spanner_v1/gapic/spanner_client_config.py @@ -11,19 +11,19 @@ "initial_retry_delay_millis": 250, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 32000, - "initial_rpc_timeout_millis": 60000, + "initial_rpc_timeout_millis": 360000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, + "max_rpc_timeout_millis": 360000, + "total_timeout_millis": 3600000, }, "streaming": { "initial_retry_delay_millis": 250, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 32000, - "initial_rpc_timeout_millis": 120000, + "initial_rpc_timeout_millis": 360000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 120000, - "total_timeout_millis": 1200000, + "max_rpc_timeout_millis": 360000, + "total_timeout_millis": 3600000, }, "long_running": { "initial_retry_delay_millis": 250, diff --git a/spanner/google/cloud/spanner_v1/pool.py b/spanner/google/cloud/spanner_v1/pool.py index 823681fbc864..4ef5aee9baab 100644 --- a/spanner/google/cloud/spanner_v1/pool.py +++ b/spanner/google/cloud/spanner_v1/pool.py @@ -17,9 +17,9 @@ import datetime from six.moves import queue -from six.moves import xrange from google.cloud.exceptions import NotFound +from google.cloud.spanner_v1._helpers import _metadata_with_prefix _NOW = datetime.datetime.utcnow # unit tests may replace @@ -166,11 +166,20 @@ def bind(self, database): when needed. 
""" self._database = database + api = database.spanner_api + metadata = _metadata_with_prefix(database.name) while not self._sessions.full(): - session = self._new_session() - session.create() - self._sessions.put(session) + resp = api.batch_create_sessions( + database.name, + self.size - self._sessions.qsize(), + timeout=self.default_timeout, + metadata=metadata, + ) + for session_pb in resp.session: + session = self._new_session() + session._session_id = session_pb.name.split("/")[-1] + self._sessions.put(session) def get(self, timeout=None): # pylint: disable=arguments-differ """Check a session out from the pool. @@ -350,11 +359,22 @@ def bind(self, database): when needed. """ self._database = database - - for _ in xrange(self.size): - session = self._new_session() - session.create() - self.put(session) + api = database.spanner_api + metadata = _metadata_with_prefix(database.name) + created_session_count = 0 + + while created_session_count < self.size: + resp = api.batch_create_sessions( + database.name, + self.size - created_session_count, + timeout=self.default_timeout, + metadata=metadata, + ) + for session_pb in resp.session: + session = self._new_session() + session._session_id = session_pb.name.split("/")[-1] + self.put(session) + created_session_count += len(resp.session) def get(self, timeout=None): # pylint: disable=arguments-differ """Check a session out from the pool. diff --git a/spanner/google/cloud/spanner_v1/session.py b/spanner/google/cloud/spanner_v1/session.py index 4685c8b80137..f8e7e88d9731 100644 --- a/spanner/google/cloud/spanner_v1/session.py +++ b/spanner/google/cloud/spanner_v1/session.py @@ -273,9 +273,10 @@ def run_in_transaction(self, func, *args, **kw): :param args: additional positional arguments to be passed to ``func``. :type kw: dict - :param kw: optional keyword arguments to be passed to ``func``. + :param kw: (Optional) keyword arguments to be passed to ``func``. 
If passed, "timeout_secs" will be removed and used to - override the default timeout. + override the default retry timeout which defines maximum timestamp + to continue retrying the transaction. :rtype: Any :returns: The return value of ``func``. diff --git a/spanner/setup.py b/spanner/setup.py index 5884bf628b7c..c8c47ef4a8d8 100644 --- a/spanner/setup.py +++ b/spanner/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-spanner" description = "Cloud Spanner API client library" -version = "1.10.0" +version = "1.12.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' @@ -30,7 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc, grpcgcp] >= 1.14.0, < 2.0.0dev", - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", ] extras = {} diff --git a/spanner/synth.metadata b/spanner/synth.metadata index e106f8893452..285778e951a2 100644 --- a/spanner/synth.metadata +++ b/spanner/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-09-24T12:30:40.124680Z", + "updateTime": "2019-09-27T12:29:07.043834Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.0", - "dockerImage": "googleapis/artman@sha256:0f66008f69061ea6d41499e2a34da3fc64fc7c9798077e3a37158653a135d801" + "version": "0.37.1", + "dockerImage": "googleapis/artman@sha256:6068f67900a3f0bdece596b97bda8fc70406ca0e137a941f4c81d3217c994a80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "fe6115fdfae318277426ec0e11b4b05e2b150723", - "internalRef": "270882829" + "sha": "cd112d8d255e0099df053643d4bd12c228ef7b1b", + "internalRef": "271468707" } }, { diff --git a/spanner/tests/system/test_system.py b/spanner/tests/system/test_system.py index 730190444edf..abfd1297d7ce 100644 --- a/spanner/tests/system/test_system.py +++ b/spanner/tests/system/test_system.py @@ -24,9 +24,12 @@ import uuid 
import pytest +import grpc +from google.rpc import code_pb2 from google.api_core import exceptions from google.api_core.datetime_helpers import DatetimeWithNanoseconds + from google.cloud.spanner_v1 import param_types from google.cloud.spanner_v1.proto.type_pb2 import ARRAY from google.cloud.spanner_v1.proto.type_pb2 import BOOL @@ -64,6 +67,10 @@ COUNTERS_TABLE = "counters" COUNTERS_COLUMNS = ("name", "value") +_STATUS_CODE_TO_GRPC_STATUS_CODE = { + member.value[0]: member for member in grpc.StatusCode +} + class Config(object): """Run-time configuration to be modified at set-up. @@ -146,7 +153,13 @@ def test_reload_instance(self): # Make sure metadata unset before reloading. instance.display_name = None - instance.reload() + def _expected_display_name(instance): + return instance.display_name == Config.INSTANCE.display_name + + retry = RetryInstanceState(_expected_display_name) + + retry(instance.reload)() + self.assertEqual(instance.display_name, Config.INSTANCE.display_name) @unittest.skipUnless(CREATE_INSTANCE, "Skipping instance creation") @@ -776,6 +789,15 @@ def test_transaction_execute_update_then_insert_commit(self): # [END spanner_test_dml_update] # [END spanner_test_dml_with_mutation] + @staticmethod + def _check_batch_status(status_code, expected=code_pb2.OK): + if status_code != expected: + grpc_status_code = _STATUS_CODE_TO_GRPC_STATUS_CODE[status_code] + call = FauxCall(status_code) + raise exceptions.from_grpc_status( + grpc_status_code, "batch_update failed", errors=[call] + ) + def test_transaction_batch_update_success(self): # [START spanner_test_dml_with_mutation] # [START spanner_test_dml_update] @@ -808,7 +830,7 @@ def unit_of_work(transaction, self): status, row_counts = transaction.batch_update( [insert_statement, update_statement, delete_statement] ) - self.assertEqual(status.code, 0) # XXX: where are values defined? 
+ self._check_batch_status(status.code) self.assertEqual(len(row_counts), 3) for row_count in row_counts: self.assertEqual(row_count, 1) @@ -849,7 +871,7 @@ def unit_of_work(transaction, self): status, row_counts = transaction.batch_update( insert_statements + update_statements ) - self.assertEqual(status.code, 0) # XXX: where are values defined? + self._check_batch_status(status.code) self.assertEqual(len(row_counts), len(insert_statements) + 1) for row_count in row_counts: self.assertEqual(row_count, 1) @@ -886,18 +908,18 @@ def test_transaction_batch_update_w_syntax_error(self): {"contact_id": Type(code=INT64)}, ) - with session.transaction() as transaction: + def unit_of_work(transaction): rows = list(transaction.read(self.TABLE, self.COLUMNS, self.ALL)) self.assertEqual(rows, []) status, row_counts = transaction.batch_update( [insert_statement, update_statement, delete_statement] ) + self._check_batch_status(status.code, code_pb2.INVALID_ARGUMENT) + self.assertEqual(len(row_counts), 1) + self.assertEqual(row_counts[0], 1) - self.assertEqual(status.code, 3) # XXX: where are values defined? 
- self.assertEqual(len(row_counts), 1) - for row_count in row_counts: - self.assertEqual(row_count, 1) + session.run_in_transaction(unit_of_work) def test_transaction_batch_update_wo_statements(self): from google.api_core.exceptions import InvalidArgument @@ -2177,3 +2199,21 @@ def _handle_abort_unit_of_work(self, transaction): def handle_abort(self, database): database.run_in_transaction(self._handle_abort_unit_of_work) self.handler_done.set() + + +class FauxCall(object): + def __init__(self, code, details="FauxCall"): + self._code = code + self._details = details + + def initial_metadata(self): + return {} + + def trailing_metadata(self): + return {} + + def code(self): + return self._code + + def details(self): + return self._details diff --git a/spanner/tests/unit/test_client.py b/spanner/tests/unit/test_client.py index 8cef6313afe9..e42031cea4fb 100644 --- a/spanner/tests/unit/test_client.py +++ b/spanner/tests/unit/test_client.py @@ -55,6 +55,7 @@ def _constructor_test_helper( expected_creds=None, client_info=None, user_agent=None, + client_options=None, ): from google.cloud.spanner_v1 import client as MUT @@ -79,6 +80,7 @@ def _constructor_test_helper( self.assertEqual(client.project, self.PROJECT) self.assertIs(client._client_info, expected_client_info) self.assertEqual(client.user_agent, user_agent) + self.assertEqual(client._client_options, client_options) def test_constructor_default_scopes(self): from google.cloud.spanner_v1 import client as MUT @@ -130,8 +132,12 @@ def test_instance_admin_api(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( - project=self.PROJECT, credentials=credentials, client_info=client_info + project=self.PROJECT, + credentials=credentials, + client_info=client_info, + client_options=client_options, ) expected_scopes = (SPANNER_ADMIN_SCOPE,) @@ -146,7 +152,9 @@ def test_instance_admin_api(self): self.assertIs(again, api) 
instance_admin_client.assert_called_once_with( - credentials=credentials.with_scopes.return_value, client_info=client_info + credentials=credentials.with_scopes.return_value, + client_info=client_info, + client_options=client_options, ) credentials.with_scopes.assert_called_once_with(expected_scopes) @@ -156,8 +164,12 @@ def test_database_admin_api(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( - project=self.PROJECT, credentials=credentials, client_info=client_info + project=self.PROJECT, + credentials=credentials, + client_info=client_info, + client_options=client_options, ) expected_scopes = (SPANNER_ADMIN_SCOPE,) @@ -172,7 +184,9 @@ def test_database_admin_api(self): self.assertIs(again, api) database_admin_client.assert_called_once_with( - credentials=credentials.with_scopes.return_value, client_info=client_info + credentials=credentials.with_scopes.return_value, + client_info=client_info, + client_options=client_options, ) credentials.with_scopes.assert_called_once_with(expected_scopes) diff --git a/spanner/tests/unit/test_database.py b/spanner/tests/unit/test_database.py index e553e0bbb8dc..f6f367e00161 100644 --- a/spanner/tests/unit/test_database.py +++ b/spanner/tests/unit/test_database.py @@ -233,6 +233,7 @@ def test_name_property(self): def test_spanner_api_property_w_scopeless_creds(self): client = _Client() client_info = client._client_info = mock.Mock() + client_options = client._client_options = mock.Mock() credentials = client.credentials = object() instance = _Instance(self.INSTANCE_NAME, client=client) pool = _Pool() @@ -250,7 +251,9 @@ def test_spanner_api_property_w_scopeless_creds(self): self.assertIs(again, api) spanner_client.assert_called_once_with( - credentials=credentials, client_info=client_info + credentials=credentials, + client_info=client_info, + client_options=client_options, ) def test_spanner_api_w_scoped_creds(self): @@ -271,6 +274,7 @@ def 
with_scopes(self, scopes): expected_scopes = (SPANNER_DATA_SCOPE,) client = _Client() client_info = client._client_info = mock.Mock() + client_options = client._client_options = mock.Mock() credentials = client.credentials = _CredentialsWithScopes() instance = _Instance(self.INSTANCE_NAME, client=client) pool = _Pool() @@ -291,6 +295,7 @@ def with_scopes(self, scopes): called_args, called_kw = spanner_client.call_args self.assertEqual(called_args, ()) self.assertEqual(called_kw["client_info"], client_info) + self.assertEqual(called_kw["client_options"], client_options) scoped = called_kw["credentials"] self.assertEqual(scoped._scopes, expected_scopes) self.assertIs(scoped._source, credentials) diff --git a/spanner/tests/unit/test_pool.py b/spanner/tests/unit/test_pool.py index 549044b1f423..eded02ea4e6d 100644 --- a/spanner/tests/unit/test_pool.py +++ b/spanner/tests/unit/test_pool.py @@ -156,8 +156,10 @@ def test_bind(self): self.assertEqual(pool.default_timeout, 10) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() def test_get_non_expired(self): pool = self._make_one(size=4) @@ -183,7 +185,7 @@ def test_get_expired(self): session = pool.get() self.assertIs(session, SESSIONS[4]) - self.assertTrue(session._created) + session.create.assert_called() self.assertTrue(SESSIONS[0]._exists_checked) self.assertFalse(pool._sessions.full()) @@ -243,8 +245,10 @@ def test_clear(self): pool.bind(database) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() pool.clear() @@ -286,7 +290,7 @@ def test_get_empty(self): self.assertIsInstance(session, _Session) self.assertIs(session._database, database) - self.assertTrue(session._created) + 
session.create.assert_called() self.assertTrue(pool._sessions.empty()) def test_get_non_empty_session_exists(self): @@ -299,7 +303,7 @@ def test_get_non_empty_session_exists(self): session = pool.get() self.assertIs(session, previous) - self.assertFalse(session._created) + session.create.assert_not_called() self.assertTrue(session._exists_checked) self.assertTrue(pool._sessions.empty()) @@ -316,7 +320,7 @@ def test_get_non_empty_session_expired(self): self.assertTrue(previous._exists_checked) self.assertIs(session, newborn) - self.assertTrue(session._created) + session.create.assert_called() self.assertFalse(session._exists_checked) self.assertTrue(pool._sessions.empty()) @@ -405,7 +409,6 @@ def test_bind(self): database = _Database("name") SESSIONS = [_Session(database)] * 10 database._sessions.extend(SESSIONS) - pool.bind(database) self.assertIs(pool._database, database) @@ -414,8 +417,10 @@ def test_bind(self): self.assertEqual(pool._delta.seconds, 3000) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() def test_get_hit_no_ping(self): pool = self._make_one(size=4) @@ -470,7 +475,7 @@ def test_get_hit_w_ping_expired(self): session = pool.get() self.assertIs(session, SESSIONS[4]) - self.assertTrue(session._created) + session.create.assert_called() self.assertTrue(SESSIONS[0]._exists_checked) self.assertFalse(pool._sessions.full()) @@ -538,8 +543,10 @@ def test_clear(self): pool.bind(database) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() pool.clear() @@ -595,7 +602,7 @@ def test_ping_oldest_stale_and_not_exists(self): pool.ping() self.assertTrue(SESSIONS[0]._exists_checked) - self.assertTrue(SESSIONS[1]._created) + 
SESSIONS[1].create.assert_called() class TestTransactionPingingPool(unittest.TestCase): @@ -635,7 +642,6 @@ def test_bind(self): database = _Database("name") SESSIONS = [_Session(database) for _ in range(10)] database._sessions.extend(SESSIONS) - pool.bind(database) self.assertIs(pool._database, database) @@ -644,8 +650,10 @@ def test_bind(self): self.assertEqual(pool._delta.seconds, 3000) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() txn = session._transaction self.assertTrue(txn._begun) @@ -671,8 +679,10 @@ def test_bind_w_timestamp_race(self): self.assertEqual(pool._delta.seconds, 3000) self.assertTrue(pool._sessions.full()) + api = database.spanner_api + self.assertEqual(api.batch_create_sessions.call_count, 5) for session in SESSIONS: - self.assertTrue(session._created) + session.create.assert_not_called() txn = session._transaction self.assertTrue(txn._begun) @@ -843,16 +853,13 @@ def __init__(self, database, exists=True, transaction=None): self._database = database self._exists = exists self._exists_checked = False - self._created = False + self.create = mock.Mock() self._deleted = False self._transaction = transaction def __lt__(self, other): return id(self) < id(other) - def create(self): - self._created = True - def exists(self): self._exists_checked = True return self._exists @@ -874,6 +881,22 @@ def __init__(self, name): self.name = name self._sessions = [] + def mock_batch_create_sessions(db, session_count=10, timeout=10, metadata=[]): + from google.cloud.spanner_v1.proto import spanner_pb2 + + response = spanner_pb2.BatchCreateSessionsResponse() + if session_count < 2: + response.session.add() + else: + response.session.add() + response.session.add() + return response + + from google.cloud.spanner_v1.gapic.spanner_client import SpannerClient + + self.spanner_api = 
mock.create_autospec(SpannerClient, instance=True) + self.spanner_api.batch_create_sessions.side_effect = mock_batch_create_sessions + def session(self): return self._sessions.pop() diff --git a/speech/google/cloud/speech_v1/proto/cloud_speech.proto b/speech/google/cloud/speech_v1/proto/cloud_speech.proto index 0887915a1e91..9553510ac8ac 100644 --- a/speech/google/cloud/speech_v1/proto/cloud_speech.proto +++ b/speech/google/cloud/speech_v1/proto/cloud_speech.proto @@ -366,14 +366,6 @@ message SpeakerDiarizationConfig { // flexibility by allowing the system to automatically determine the correct // number of speakers. If not set, the default value is 6. int32 max_speaker_count = 3; - - // A distinct integer value is assigned for every speaker within - // the audio. This field specifies which one of those speakers was detected to - // have spoken this word. Value ranges from '1' to diarization_speaker_count. - // speaker_tag is set if enable_speaker_diarization = 'true' and only in the - // top alternative. - int32 speaker_tag = 5 - [(google.api.field_behavior) = OUTPUT_ONLY]; } // Description of audio data to be recognized. @@ -752,4 +744,12 @@ message WordInfo { // The word corresponding to this set of information. string word = 3; + + // A distinct integer value is assigned for every speaker within + // the audio. This field specifies which one of those speakers was detected to + // have spoken this word. Value ranges from '1' to diarization_speaker_count. + // speaker_tag is set if enable_speaker_diarization = 'true' and only in the + // top alternative. 
+ int32 speaker_tag = 5 + [(google.api.field_behavior) = OUTPUT_ONLY]; } diff --git a/speech/google/cloud/speech_v1/proto/cloud_speech_pb2.py b/speech/google/cloud/speech_v1/proto/cloud_speech_pb2.py index f75e917c180d..375bc8a1fd89 100644 --- a/speech/google/cloud/speech_v1/proto/cloud_speech_pb2.py +++ b/speech/google/cloud/speech_v1/proto/cloud_speech_pb2.py @@ -35,7 +35,7 @@ "\n\032com.google.cloud.speech.v1B\013SpeechProtoP\001Z\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12<\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudioB\x03\xe0\x41\x02"\x9b\x01\n\x1bLongRunningRecognizeRequest\x12>\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12<\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudioB\x03\xe0\x41\x02"\x99\x01\n\x19StreamingRecognizeRequest\x12N\n\x10streaming_config\x18\x01 \x01(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionConfigH\x00\x12\x17\n\raudio_content\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request"\x8f\x01\n\x1aStreamingRecognitionConfig\x12>\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12\x18\n\x10single_utterance\x18\x02 \x01(\x08\x12\x17\n\x0finterim_results\x18\x03 \x01(\x08"\xdf\x05\n\x11RecognitionConfig\x12I\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32\x37.google.cloud.speech.v1.RecognitionConfig.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x1b\n\x13\x61udio_channel_count\x18\x07 \x01(\x05\x12/\n\'enable_separate_recognition_per_channel\x18\x0c \x01(\x08\x12\x1a\n\rlanguage_code\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x10max_alternatives\x18\x04 \x01(\x05\x12\x18\n\x10profanity_filter\x18\x05 \x01(\x08\x12>\n\x0fspeech_contexts\x18\x06 \x03(\x0b\x32%.google.cloud.speech.v1.SpeechContext\x12 \n\x18\x65nable_word_time_offsets\x18\x08 \x01(\x08\x12$\n\x1c\x65nable_automatic_punctuation\x18\x0b 
\x01(\x08\x12L\n\x12\x64iarization_config\x18\x13 \x01(\x0b\x32\x30.google.cloud.speech.v1.SpeakerDiarizationConfig\x12=\n\x08metadata\x18\t \x01(\x0b\x32+.google.cloud.speech.v1.RecognitionMetadata\x12\r\n\x05model\x18\r \x01(\t\x12\x14\n\x0cuse_enhanced\x18\x0e \x01(\x08"\x8b\x01\n\rAudioEncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\x12\t\n\x05MULAW\x10\x03\x12\x07\n\x03\x41MR\x10\x04\x12\n\n\x06\x41MR_WB\x10\x05\x12\x0c\n\x08OGG_OPUS\x10\x06\x12\x1a\n\x16SPEEX_WITH_HEADER_BYTE\x10\x07"\x8e\x01\n\x18SpeakerDiarizationConfig\x12"\n\x1a\x65nable_speaker_diarization\x18\x01 \x01(\x08\x12\x19\n\x11min_speaker_count\x18\x02 \x01(\x05\x12\x19\n\x11max_speaker_count\x18\x03 \x01(\x05\x12\x18\n\x0bspeaker_tag\x18\x05 \x01(\x05\x42\x03\xe0\x41\x03"\xa0\x08\n\x13RecognitionMetadata\x12U\n\x10interaction_type\x18\x01 \x01(\x0e\x32;.google.cloud.speech.v1.RecognitionMetadata.InteractionType\x12$\n\x1cindustry_naics_code_of_audio\x18\x03 \x01(\r\x12[\n\x13microphone_distance\x18\x04 \x01(\x0e\x32>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance\x12Z\n\x13original_media_type\x18\x05 \x01(\x0e\x32=.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType\x12^\n\x15recording_device_type\x18\x06 \x01(\x0e\x32?.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType\x12\x1d\n\x15recording_device_name\x18\x07 \x01(\t\x12\x1a\n\x12original_mime_type\x18\x08 \x01(\t\x12\x13\n\x0b\x61udio_topic\x18\n \x01(\t"\xc5\x01\n\x0fInteractionType\x12 
\n\x1cINTERACTION_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nDISCUSSION\x10\x01\x12\x10\n\x0cPRESENTATION\x10\x02\x12\x0e\n\nPHONE_CALL\x10\x03\x12\r\n\tVOICEMAIL\x10\x04\x12\x1b\n\x17PROFESSIONALLY_PRODUCED\x10\x05\x12\x10\n\x0cVOICE_SEARCH\x10\x06\x12\x11\n\rVOICE_COMMAND\x10\x07\x12\r\n\tDICTATION\x10\x08"d\n\x12MicrophoneDistance\x12#\n\x1fMICROPHONE_DISTANCE_UNSPECIFIED\x10\x00\x12\r\n\tNEARFIELD\x10\x01\x12\x0c\n\x08MIDFIELD\x10\x02\x12\x0c\n\x08\x46\x41RFIELD\x10\x03"N\n\x11OriginalMediaType\x12#\n\x1fORIGINAL_MEDIA_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x41UDIO\x10\x01\x12\t\n\x05VIDEO\x10\x02"\xa4\x01\n\x13RecordingDeviceType\x12%\n!RECORDING_DEVICE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSMARTPHONE\x10\x01\x12\x06\n\x02PC\x10\x02\x12\x0e\n\nPHONE_LINE\x10\x03\x12\x0b\n\x07VEHICLE\x10\x04\x12\x18\n\x14OTHER_OUTDOOR_DEVICE\x10\x05\x12\x17\n\x13OTHER_INDOOR_DEVICE\x10\x06" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t"D\n\x10RecognitionAudio\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x42\x0e\n\x0c\x61udio_source"U\n\x11RecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult"`\n\x1cLongRunningRecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult"\x9e\x01\n\x1cLongRunningRecognizeMetadata\x12\x18\n\x10progress_percent\x18\x01 \x01(\x05\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xb1\x02\n\x1aStreamingRecognizeResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x43\n\x07results\x18\x02 \x03(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionResult\x12]\n\x11speech_event_type\x18\x04 
\x01(\x0e\x32\x42.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType"L\n\x0fSpeechEventType\x12\x1c\n\x18SPEECH_EVENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x01"\xf2\x01\n\x1aStreamingRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x10\n\x08is_final\x18\x02 \x01(\x08\x12\x11\n\tstability\x18\x03 \x01(\x02\x12\x32\n\x0fresult_end_time\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x13\n\x0b\x63hannel_tag\x18\x05 \x01(\x05\x12\x1a\n\rlanguage_code\x18\x06 \x01(\tB\x03\xe0\x41\x03"z\n\x17SpeechRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x13\n\x0b\x63hannel_tag\x18\x02 \x01(\x05"w\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12/\n\x05words\x18\x03 \x03(\x0b\x32 .google.cloud.speech.v1.WordInfo"t\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 
\x01(\t2\xd1\x04\n\x06Speech\x12\x90\x01\n\tRecognize\x12(.google.cloud.speech.v1.RecognizeRequest\x1a).google.cloud.speech.v1.RecognizeResponse".\x82\xd3\xe4\x93\x02\x19"\x14/v1/speech:recognize:\x01*\xda\x41\x0c\x63onfig,audio\x12\xe4\x01\n\x14LongRunningRecognize\x12\x33.google.cloud.speech.v1.LongRunningRecognizeRequest\x1a\x1d.google.longrunning.Operation"x\x82\xd3\xe4\x93\x02$"\x1f/v1/speech:longrunningrecognize:\x01*\xda\x41\x0c\x63onfig,audio\xca\x41<\n\x1cLongRunningRecognizeResponse\x12\x1cLongRunningRecognizeMetadata\x12\x81\x01\n\x12StreamingRecognize\x12\x31.google.cloud.speech.v1.StreamingRecognizeRequest\x1a\x32.google.cloud.speech.v1.StreamingRecognizeResponse"\x00(\x01\x30\x01\x1aI\xca\x41\x15speech.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x1a\x63om.google.cloud.speech.v1B\x0bSpeechProtoP\x01Z\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12<\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudioB\x03\xe0\x41\x02"\x9b\x01\n\x1bLongRunningRecognizeRequest\x12>\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12<\n\x05\x61udio\x18\x02 \x01(\x0b\x32(.google.cloud.speech.v1.RecognitionAudioB\x03\xe0\x41\x02"\x99\x01\n\x19StreamingRecognizeRequest\x12N\n\x10streaming_config\x18\x01 \x01(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionConfigH\x00\x12\x17\n\raudio_content\x18\x02 \x01(\x0cH\x00\x42\x13\n\x11streaming_request"\x8f\x01\n\x1aStreamingRecognitionConfig\x12>\n\x06\x63onfig\x18\x01 \x01(\x0b\x32).google.cloud.speech.v1.RecognitionConfigB\x03\xe0\x41\x02\x12\x18\n\x10single_utterance\x18\x02 \x01(\x08\x12\x17\n\x0finterim_results\x18\x03 \x01(\x08"\xdf\x05\n\x11RecognitionConfig\x12I\n\x08\x65ncoding\x18\x01 \x01(\x0e\x32\x37.google.cloud.speech.v1.RecognitionConfig.AudioEncoding\x12\x19\n\x11sample_rate_hertz\x18\x02 \x01(\x05\x12\x1b\n\x13\x61udio_channel_count\x18\x07 
\x01(\x05\x12/\n\'enable_separate_recognition_per_channel\x18\x0c \x01(\x08\x12\x1a\n\rlanguage_code\x18\x03 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x10max_alternatives\x18\x04 \x01(\x05\x12\x18\n\x10profanity_filter\x18\x05 \x01(\x08\x12>\n\x0fspeech_contexts\x18\x06 \x03(\x0b\x32%.google.cloud.speech.v1.SpeechContext\x12 \n\x18\x65nable_word_time_offsets\x18\x08 \x01(\x08\x12$\n\x1c\x65nable_automatic_punctuation\x18\x0b \x01(\x08\x12L\n\x12\x64iarization_config\x18\x13 \x01(\x0b\x32\x30.google.cloud.speech.v1.SpeakerDiarizationConfig\x12=\n\x08metadata\x18\t \x01(\x0b\x32+.google.cloud.speech.v1.RecognitionMetadata\x12\r\n\x05model\x18\r \x01(\t\x12\x14\n\x0cuse_enhanced\x18\x0e \x01(\x08"\x8b\x01\n\rAudioEncoding\x12\x18\n\x14\x45NCODING_UNSPECIFIED\x10\x00\x12\x0c\n\x08LINEAR16\x10\x01\x12\x08\n\x04\x46LAC\x10\x02\x12\t\n\x05MULAW\x10\x03\x12\x07\n\x03\x41MR\x10\x04\x12\n\n\x06\x41MR_WB\x10\x05\x12\x0c\n\x08OGG_OPUS\x10\x06\x12\x1a\n\x16SPEEX_WITH_HEADER_BYTE\x10\x07"t\n\x18SpeakerDiarizationConfig\x12"\n\x1a\x65nable_speaker_diarization\x18\x01 \x01(\x08\x12\x19\n\x11min_speaker_count\x18\x02 \x01(\x05\x12\x19\n\x11max_speaker_count\x18\x03 \x01(\x05"\xa0\x08\n\x13RecognitionMetadata\x12U\n\x10interaction_type\x18\x01 \x01(\x0e\x32;.google.cloud.speech.v1.RecognitionMetadata.InteractionType\x12$\n\x1cindustry_naics_code_of_audio\x18\x03 \x01(\r\x12[\n\x13microphone_distance\x18\x04 \x01(\x0e\x32>.google.cloud.speech.v1.RecognitionMetadata.MicrophoneDistance\x12Z\n\x13original_media_type\x18\x05 \x01(\x0e\x32=.google.cloud.speech.v1.RecognitionMetadata.OriginalMediaType\x12^\n\x15recording_device_type\x18\x06 \x01(\x0e\x32?.google.cloud.speech.v1.RecognitionMetadata.RecordingDeviceType\x12\x1d\n\x15recording_device_name\x18\x07 \x01(\t\x12\x1a\n\x12original_mime_type\x18\x08 \x01(\t\x12\x13\n\x0b\x61udio_topic\x18\n \x01(\t"\xc5\x01\n\x0fInteractionType\x12 
\n\x1cINTERACTION_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nDISCUSSION\x10\x01\x12\x10\n\x0cPRESENTATION\x10\x02\x12\x0e\n\nPHONE_CALL\x10\x03\x12\r\n\tVOICEMAIL\x10\x04\x12\x1b\n\x17PROFESSIONALLY_PRODUCED\x10\x05\x12\x10\n\x0cVOICE_SEARCH\x10\x06\x12\x11\n\rVOICE_COMMAND\x10\x07\x12\r\n\tDICTATION\x10\x08"d\n\x12MicrophoneDistance\x12#\n\x1fMICROPHONE_DISTANCE_UNSPECIFIED\x10\x00\x12\r\n\tNEARFIELD\x10\x01\x12\x0c\n\x08MIDFIELD\x10\x02\x12\x0c\n\x08\x46\x41RFIELD\x10\x03"N\n\x11OriginalMediaType\x12#\n\x1fORIGINAL_MEDIA_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x41UDIO\x10\x01\x12\t\n\x05VIDEO\x10\x02"\xa4\x01\n\x13RecordingDeviceType\x12%\n!RECORDING_DEVICE_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSMARTPHONE\x10\x01\x12\x06\n\x02PC\x10\x02\x12\x0e\n\nPHONE_LINE\x10\x03\x12\x0b\n\x07VEHICLE\x10\x04\x12\x18\n\x14OTHER_OUTDOOR_DEVICE\x10\x05\x12\x17\n\x13OTHER_INDOOR_DEVICE\x10\x06" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t"D\n\x10RecognitionAudio\x12\x11\n\x07\x63ontent\x18\x01 \x01(\x0cH\x00\x12\r\n\x03uri\x18\x02 \x01(\tH\x00\x42\x0e\n\x0c\x61udio_source"U\n\x11RecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult"`\n\x1cLongRunningRecognizeResponse\x12@\n\x07results\x18\x02 \x03(\x0b\x32/.google.cloud.speech.v1.SpeechRecognitionResult"\x9e\x01\n\x1cLongRunningRecognizeMetadata\x12\x18\n\x10progress_percent\x18\x01 \x01(\x05\x12.\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x10last_update_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xb1\x02\n\x1aStreamingRecognizeResponse\x12!\n\x05\x65rror\x18\x01 \x01(\x0b\x32\x12.google.rpc.Status\x12\x43\n\x07results\x18\x02 \x03(\x0b\x32\x32.google.cloud.speech.v1.StreamingRecognitionResult\x12]\n\x11speech_event_type\x18\x04 
\x01(\x0e\x32\x42.google.cloud.speech.v1.StreamingRecognizeResponse.SpeechEventType"L\n\x0fSpeechEventType\x12\x1c\n\x18SPEECH_EVENT_UNSPECIFIED\x10\x00\x12\x1b\n\x17\x45ND_OF_SINGLE_UTTERANCE\x10\x01"\xf2\x01\n\x1aStreamingRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x10\n\x08is_final\x18\x02 \x01(\x08\x12\x11\n\tstability\x18\x03 \x01(\x02\x12\x32\n\x0fresult_end_time\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x13\n\x0b\x63hannel_tag\x18\x05 \x01(\x05\x12\x1a\n\rlanguage_code\x18\x06 \x01(\tB\x03\xe0\x41\x03"z\n\x17SpeechRecognitionResult\x12J\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32\x34.google.cloud.speech.v1.SpeechRecognitionAlternative\x12\x13\n\x0b\x63hannel_tag\x18\x02 \x01(\x05"w\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12/\n\x05words\x18\x03 \x03(\x0b\x32 .google.cloud.speech.v1.WordInfo"\x8e\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 \x01(\t\x12\x18\n\x0bspeaker_tag\x18\x05 
\x01(\x05\x42\x03\xe0\x41\x03\x32\xd1\x04\n\x06Speech\x12\x90\x01\n\tRecognize\x12(.google.cloud.speech.v1.RecognizeRequest\x1a).google.cloud.speech.v1.RecognizeResponse".\x82\xd3\xe4\x93\x02\x19"\x14/v1/speech:recognize:\x01*\xda\x41\x0c\x63onfig,audio\x12\xe4\x01\n\x14LongRunningRecognize\x12\x33.google.cloud.speech.v1.LongRunningRecognizeRequest\x1a\x1d.google.longrunning.Operation"x\x82\xd3\xe4\x93\x02$"\x1f/v1/speech:longrunningrecognize:\x01*\xda\x41\x0c\x63onfig,audio\xca\x41<\n\x1cLongRunningRecognizeResponse\x12\x1cLongRunningRecognizeMetadata\x12\x81\x01\n\x12StreamingRecognize\x12\x31.google.cloud.speech.v1.StreamingRecognizeRequest\x1a\x32.google.cloud.speech.v1.StreamingRecognizeResponse"\x00(\x01\x30\x01\x1aI\xca\x41\x15speech.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x1a\x63om.google.cloud.speech.v1B\x0bSpeechProtoP\x01Z.google.cloud.talent.v4beta1.CompensationInfo.CompensationUnitB\x03\xe0\x41\x02\x12N\n\x05range\x18\x03 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRange\x12\x38\n0include_jobs_with_unspecified_compensation_range\x18\x04 \x01(\x08"\x86\x01\n\nFilterType\x12\x1b\n\x17\x46ILTER_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tUNIT_ONLY\x10\x01\x12\x13\n\x0fUNIT_AND_AMOUNT\x10\x02\x12\x1a\n\x16\x41NNUALIZED_BASE_AMOUNT\x10\x03\x12\x1b\n\x17\x41NNUALIZED_TOTAL_AMOUNT\x10\x04"\xcb\x03\n\rCommuteFilter\x12G\n\x0e\x63ommute_method\x18\x01 \x01(\x0e\x32*.google.cloud.talent.v4beta1.CommuteMethodB\x03\xe0\x41\x02\x12\x33\n\x11start_coordinates\x18\x02 \x01(\x0b\x32\x13.google.type.LatLngB\x03\xe0\x41\x02\x12\x37\n\x0ftravel_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12!\n\x19\x61llow_imprecise_addresses\x18\x04 \x01(\x08\x12N\n\x0croad_traffic\x18\x05 \x01(\x0e\x32\x36.google.cloud.talent.v4beta1.CommuteFilter.RoadTrafficH\x00\x12\x30\n\x0e\x64\x65parture_time\x18\x06 
\x01(\x0b\x32\x16.google.type.TimeOfDayH\x00"L\n\x0bRoadTraffic\x12\x1c\n\x18ROAD_TRAFFIC_UNSPECIFIED\x10\x00\x12\x10\n\x0cTRAFFIC_FREE\x10\x01\x12\r\n\tBUSY_HOUR\x10\x02\x42\x10\n\x0etraffic_option"9\n\x0eJobTitleFilter\x12\x16\n\tjob_title\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"2\n\x0bSkillFilter\x12\x12\n\x05skill\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"\xa6\x02\n\x0e\x45mployerFilter\x12\x15\n\x08\x65mployer\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x04mode\x18\x02 \x01(\x0e\x32>.google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode\x12\x0f\n\x07negated\x18\x03 \x01(\x08"\x9d\x01\n\x12\x45mployerFilterMode\x12$\n EMPLOYER_FILTER_MODE_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x41LL_EMPLOYMENT_RECORDS\x10\x01\x12#\n\x1f\x43URRENT_EMPLOYMENT_RECORDS_ONLY\x10\x02\x12 \n\x1cPAST_EMPLOYMENT_RECORDS_ONLY\x10\x03"\x88\x01\n\x0f\x45\x64ucationFilter\x12\x0e\n\x06school\x18\x01 \x01(\t\x12\x16\n\x0e\x66ield_of_study\x18\x02 \x01(\t\x12<\n\x0b\x64\x65gree_type\x18\x03 \x01(\x0e\x32\'.google.cloud.talent.v4beta1.DegreeType\x12\x0f\n\x07negated\x18\x06 \x01(\x08"|\n\x14WorkExperienceFilter\x12\x31\n\x0emin_experience\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0emax_experience\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"c\n\x15\x41pplicationDateFilter\x12%\n\nstart_date\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12#\n\x08\x65nd_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date"L\n\x1d\x41pplicationOutcomeNotesFilter\x12\x1a\n\routcome_notes\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"V\n\x14\x41pplicationJobFilter\x12\x1a\n\x12job_requisition_id\x18\x02 \x01(\t\x12\x11\n\tjob_title\x18\x03 \x01(\t\x12\x0f\n\x07negated\x18\x04 \x01(\x08"\xfc\x01\n\nTimeFilter\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n\ntime_field\x18\x03 
\x01(\x0e\x32\x31.google.cloud.talent.v4beta1.TimeFilter.TimeField"I\n\tTimeField\x12\x1a\n\x16TIME_FIELD_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43REATE_TIME\x10\x01\x12\x0f\n\x0bUPDATE_TIME\x10\x02"2\n\x1b\x43\x61ndidateAvailabilityFilter\x12\x0f\n\x07negated\x18\x01 \x01(\x08:\x02\x18\x01"\xb6\x01\n\x12\x41vailabilityFilter\x12M\n\x0bsignal_type\x18\x01 \x01(\x0e\x32\x33.google.cloud.talent.v4beta1.AvailabilitySignalTypeB\x03\xe0\x41\x02\x12?\n\x05range\x18\x02 \x01(\x0b\x32+.google.cloud.talent.v4beta1.TimestampRangeB\x03\xe0\x41\x02\x12\x10\n\x08required\x18\x03 \x01(\x08",\n\x10PersonNameFilter\x12\x18\n\x0bperson_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x42z\n\x1f\x63om.google.cloud.talent.v4beta1B\x0c\x46iltersProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' + '\n/google/cloud/talent_v4beta1/proto/filters.proto\x12\x1bgoogle.cloud.talent.v4beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a.google/cloud/talent_v4beta1/proto/common.proto\x1a\x1egoogle/protobuf/duration.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x16google/type/date.proto\x1a\x18google/type/latlng.proto\x1a\x1bgoogle/type/timeofday.proto"\x80\x05\n\x08JobQuery\x12\r\n\x05query\x18\x01 \x01(\t\x12\x1b\n\x13query_language_code\x18\x0e \x01(\t\x12\x11\n\tcompanies\x18\x02 \x03(\t\x12\x45\n\x10location_filters\x18\x03 \x03(\x0b\x32+.google.cloud.talent.v4beta1.LocationFilter\x12@\n\x0ejob_categories\x18\x04 \x03(\x0e\x32(.google.cloud.talent.v4beta1.JobCategory\x12\x42\n\x0e\x63ommute_filter\x18\x05 \x01(\x0b\x32*.google.cloud.talent.v4beta1.CommuteFilter\x12\x1d\n\x15\x63ompany_display_names\x18\x06 \x03(\t\x12L\n\x13\x63ompensation_filter\x18\x07 \x01(\x0b\x32/.google.cloud.talent.v4beta1.CompensationFilter\x12\x1f\n\x17\x63ustom_attribute_filter\x18\x08 \x01(\t\x12\x1b\n\x13\x64isable_spell_check\x18\t 
\x01(\x08\x12\x45\n\x10\x65mployment_types\x18\n \x03(\x0e\x32+.google.cloud.talent.v4beta1.EmploymentType\x12\x16\n\x0elanguage_codes\x18\x0b \x03(\t\x12G\n\x12publish_time_range\x18\x0c \x01(\x0b\x32+.google.cloud.talent.v4beta1.TimestampRange\x12\x15\n\rexcluded_jobs\x18\r \x03(\t"\xf5\x08\n\x0cProfileQuery\x12\r\n\x05query\x18\x01 \x01(\t\x12\x45\n\x10location_filters\x18\x02 \x03(\x0b\x32+.google.cloud.talent.v4beta1.LocationFilter\x12\x46\n\x11job_title_filters\x18\x03 \x03(\x0b\x32+.google.cloud.talent.v4beta1.JobTitleFilter\x12\x45\n\x10\x65mployer_filters\x18\x04 \x03(\x0b\x32+.google.cloud.talent.v4beta1.EmployerFilter\x12G\n\x11\x65\x64ucation_filters\x18\x05 \x03(\x0b\x32,.google.cloud.talent.v4beta1.EducationFilter\x12?\n\rskill_filters\x18\x06 \x03(\x0b\x32(.google.cloud.talent.v4beta1.SkillFilter\x12Q\n\x16work_experience_filter\x18\x07 \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.WorkExperienceFilter\x12=\n\x0ctime_filters\x18\x08 \x03(\x0b\x32\'.google.cloud.talent.v4beta1.TimeFilter\x12\x32\n\x0ehirable_filter\x18\t \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x12T\n\x18\x61pplication_date_filters\x18\n \x03(\x0b\x32\x32.google.cloud.talent.v4beta1.ApplicationDateFilter\x12\x65\n!application_outcome_notes_filters\x18\x0b \x03(\x0b\x32:.google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter\x12R\n\x17\x61pplication_job_filters\x18\r \x03(\x0b\x32\x31.google.cloud.talent.v4beta1.ApplicationJobFilter\x12\x1f\n\x17\x63ustom_attribute_filter\x18\x0f \x01(\t\x12\x63\n\x1d\x63\x61ndidate_availability_filter\x18\x10 \x01(\x0b\x32\x38.google.cloud.talent.v4beta1.CandidateAvailabilityFilterB\x02\x18\x01\x12M\n\x14\x61vailability_filters\x18\x12 \x03(\x0b\x32/.google.cloud.talent.v4beta1.AvailabilityFilter\x12J\n\x13person_name_filters\x18\x11 \x03(\x0b\x32-.google.cloud.talent.v4beta1.PersonNameFilter"\xdf\x02\n\x0eLocationFilter\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x13\n\x0bregion_code\x18\x02 \x01(\t\x12$\n\x07lat_lng\x18\x03 
\x01(\x0b\x32\x13.google.type.LatLng\x12\x19\n\x11\x64istance_in_miles\x18\x04 \x01(\x01\x12\x61\n\x16telecommute_preference\x18\x05 \x01(\x0e\x32\x41.google.cloud.talent.v4beta1.LocationFilter.TelecommutePreference\x12\x0f\n\x07negated\x18\x06 \x01(\x08"r\n\x15TelecommutePreference\x12&\n"TELECOMMUTE_PREFERENCE_UNSPECIFIED\x10\x00\x12\x18\n\x14TELECOMMUTE_EXCLUDED\x10\x01\x12\x17\n\x13TELECOMMUTE_ALLOWED\x10\x02"\xca\x03\n\x12\x43ompensationFilter\x12M\n\x04type\x18\x01 \x01(\x0e\x32:.google.cloud.talent.v4beta1.CompensationFilter.FilterTypeB\x03\xe0\x41\x02\x12R\n\x05units\x18\x02 \x03(\x0e\x32>.google.cloud.talent.v4beta1.CompensationInfo.CompensationUnitB\x03\xe0\x41\x02\x12N\n\x05range\x18\x03 \x01(\x0b\x32?.google.cloud.talent.v4beta1.CompensationInfo.CompensationRange\x12\x38\n0include_jobs_with_unspecified_compensation_range\x18\x04 \x01(\x08"\x86\x01\n\nFilterType\x12\x1b\n\x17\x46ILTER_TYPE_UNSPECIFIED\x10\x00\x12\r\n\tUNIT_ONLY\x10\x01\x12\x13\n\x0fUNIT_AND_AMOUNT\x10\x02\x12\x1a\n\x16\x41NNUALIZED_BASE_AMOUNT\x10\x03\x12\x1b\n\x17\x41NNUALIZED_TOTAL_AMOUNT\x10\x04"\xcb\x03\n\rCommuteFilter\x12G\n\x0e\x63ommute_method\x18\x01 \x01(\x0e\x32*.google.cloud.talent.v4beta1.CommuteMethodB\x03\xe0\x41\x02\x12\x33\n\x11start_coordinates\x18\x02 \x01(\x0b\x32\x13.google.type.LatLngB\x03\xe0\x41\x02\x12\x37\n\x0ftravel_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12!\n\x19\x61llow_imprecise_addresses\x18\x04 \x01(\x08\x12N\n\x0croad_traffic\x18\x05 \x01(\x0e\x32\x36.google.cloud.talent.v4beta1.CommuteFilter.RoadTrafficH\x00\x12\x30\n\x0e\x64\x65parture_time\x18\x06 \x01(\x0b\x32\x16.google.type.TimeOfDayH\x00"L\n\x0bRoadTraffic\x12\x1c\n\x18ROAD_TRAFFIC_UNSPECIFIED\x10\x00\x12\x10\n\x0cTRAFFIC_FREE\x10\x01\x12\r\n\tBUSY_HOUR\x10\x02\x42\x10\n\x0etraffic_option"9\n\x0eJobTitleFilter\x12\x16\n\tjob_title\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"2\n\x0bSkillFilter\x12\x12\n\x05skill\x18\x01 
\x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"\xa6\x02\n\x0e\x45mployerFilter\x12\x15\n\x08\x65mployer\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12L\n\x04mode\x18\x02 \x01(\x0e\x32>.google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode\x12\x0f\n\x07negated\x18\x03 \x01(\x08"\x9d\x01\n\x12\x45mployerFilterMode\x12$\n EMPLOYER_FILTER_MODE_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x41LL_EMPLOYMENT_RECORDS\x10\x01\x12#\n\x1f\x43URRENT_EMPLOYMENT_RECORDS_ONLY\x10\x02\x12 \n\x1cPAST_EMPLOYMENT_RECORDS_ONLY\x10\x03"\x88\x01\n\x0f\x45\x64ucationFilter\x12\x0e\n\x06school\x18\x01 \x01(\t\x12\x16\n\x0e\x66ield_of_study\x18\x02 \x01(\t\x12<\n\x0b\x64\x65gree_type\x18\x03 \x01(\x0e\x32\'.google.cloud.talent.v4beta1.DegreeType\x12\x0f\n\x07negated\x18\x06 \x01(\x08"|\n\x14WorkExperienceFilter\x12\x31\n\x0emin_experience\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x31\n\x0emax_experience\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"c\n\x15\x41pplicationDateFilter\x12%\n\nstart_date\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12#\n\x08\x65nd_date\x18\x02 \x01(\x0b\x32\x11.google.type.Date"L\n\x1d\x41pplicationOutcomeNotesFilter\x12\x1a\n\routcome_notes\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07negated\x18\x02 \x01(\x08"V\n\x14\x41pplicationJobFilter\x12\x1a\n\x12job_requisition_id\x18\x02 \x01(\t\x12\x11\n\tjob_title\x18\x03 \x01(\t\x12\x0f\n\x07negated\x18\x04 \x01(\x08"\xfc\x01\n\nTimeFilter\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n\ntime_field\x18\x03 \x01(\x0e\x32\x31.google.cloud.talent.v4beta1.TimeFilter.TimeField"I\n\tTimeField\x12\x1a\n\x16TIME_FIELD_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x43REATE_TIME\x10\x01\x12\x0f\n\x0bUPDATE_TIME\x10\x02"2\n\x1b\x43\x61ndidateAvailabilityFilter\x12\x0f\n\x07negated\x18\x01 \x01(\x08:\x02\x18\x01"\xb6\x01\n\x12\x41vailabilityFilter\x12M\n\x0bsignal_type\x18\x01 
\x01(\x0e\x32\x33.google.cloud.talent.v4beta1.AvailabilitySignalTypeB\x03\xe0\x41\x02\x12?\n\x05range\x18\x02 \x01(\x0b\x32+.google.cloud.talent.v4beta1.TimestampRangeB\x03\xe0\x41\x02\x12\x10\n\x08required\x18\x03 \x01(\x08",\n\x10PersonNameFilter\x12\x18\n\x0bperson_name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x42z\n\x1f\x63om.google.cloud.talent.v4beta1B\x0c\x46iltersProtoP\x01ZAgoogle.golang.org/genproto/googleapis/cloud/talent/v4beta1;talent\xa2\x02\x03\x43TSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_common__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, google_dot_protobuf_dot_wrappers__pb2.DESCRIPTOR, google_dot_type_dot_date__pb2.DESCRIPTOR, @@ -82,8 +84,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2363, - serialized_end=2477, + serialized_start=2426, + serialized_end=2540, ) _sym_db.RegisterEnumDescriptor(_LOCATIONFILTER_TELECOMMUTEPREFERENCE) @@ -127,8 +129,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2804, - serialized_end=2938, + serialized_start=2867, + serialized_end=3001, ) _sym_db.RegisterEnumDescriptor(_COMPENSATIONFILTER_FILTERTYPE) @@ -154,8 +156,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3306, - serialized_end=3382, + serialized_start=3369, + serialized_end=3445, ) _sym_db.RegisterEnumDescriptor(_COMMUTEFILTER_ROADTRAFFIC) @@ -196,8 +198,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=3651, - serialized_end=3808, + serialized_start=3714, + serialized_end=3871, ) _sym_db.RegisterEnumDescriptor(_EMPLOYERFILTER_EMPLOYERFILTERMODE) @@ -223,8 +225,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=4522, - serialized_end=4595, + serialized_start=4585, + 
serialized_end=4658, ) _sym_db.RegisterEnumDescriptor(_TIMEFILTER_TIMEFIELD) @@ -254,10 +256,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="query_language_code", + full_name="google.cloud.talent.v4beta1.JobQuery.query_language_code", + index=1, + number=14, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="companies", full_name="google.cloud.talent.v4beta1.JobQuery.companies", - index=1, + index=2, number=2, type=9, cpp_type=9, @@ -275,7 +295,7 @@ _descriptor.FieldDescriptor( name="location_filters", full_name="google.cloud.talent.v4beta1.JobQuery.location_filters", - index=2, + index=3, number=3, type=11, cpp_type=10, @@ -293,7 +313,7 @@ _descriptor.FieldDescriptor( name="job_categories", full_name="google.cloud.talent.v4beta1.JobQuery.job_categories", - index=3, + index=4, number=4, type=14, cpp_type=8, @@ -311,7 +331,7 @@ _descriptor.FieldDescriptor( name="commute_filter", full_name="google.cloud.talent.v4beta1.JobQuery.commute_filter", - index=4, + index=5, number=5, type=11, cpp_type=10, @@ -329,7 +349,7 @@ _descriptor.FieldDescriptor( name="company_display_names", full_name="google.cloud.talent.v4beta1.JobQuery.company_display_names", - index=5, + index=6, number=6, type=9, cpp_type=9, @@ -347,7 +367,7 @@ _descriptor.FieldDescriptor( name="compensation_filter", full_name="google.cloud.talent.v4beta1.JobQuery.compensation_filter", - index=6, + index=7, number=7, type=11, cpp_type=10, @@ -365,7 +385,7 @@ _descriptor.FieldDescriptor( name="custom_attribute_filter", full_name="google.cloud.talent.v4beta1.JobQuery.custom_attribute_filter", - index=7, + index=8, number=8, type=9, cpp_type=9, @@ -383,7 +403,7 @@ _descriptor.FieldDescriptor( name="disable_spell_check", 
full_name="google.cloud.talent.v4beta1.JobQuery.disable_spell_check", - index=8, + index=9, number=9, type=8, cpp_type=7, @@ -401,7 +421,7 @@ _descriptor.FieldDescriptor( name="employment_types", full_name="google.cloud.talent.v4beta1.JobQuery.employment_types", - index=9, + index=10, number=10, type=14, cpp_type=8, @@ -419,7 +439,7 @@ _descriptor.FieldDescriptor( name="language_codes", full_name="google.cloud.talent.v4beta1.JobQuery.language_codes", - index=10, + index=11, number=11, type=9, cpp_type=9, @@ -437,7 +457,7 @@ _descriptor.FieldDescriptor( name="publish_time_range", full_name="google.cloud.talent.v4beta1.JobQuery.publish_time_range", - index=11, + index=12, number=12, type=11, cpp_type=10, @@ -455,7 +475,7 @@ _descriptor.FieldDescriptor( name="excluded_jobs", full_name="google.cloud.talent.v4beta1.JobQuery.excluded_jobs", - index=12, + index=13, number=13, type=9, cpp_type=9, @@ -479,8 +499,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=368, - serialized_end=979, + serialized_start=402, + serialized_end=1042, ) @@ -788,8 +808,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=982, - serialized_end=2123, + serialized_start=1045, + serialized_end=2186, ) @@ -917,8 +937,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2126, - serialized_end=2477, + serialized_start=2189, + serialized_end=2540, ) @@ -1010,8 +1030,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2480, - serialized_end=2938, + serialized_start=2543, + serialized_end=3001, ) @@ -1147,8 +1167,8 @@ fields=[], ) ], - serialized_start=2941, - serialized_end=3400, + serialized_start=3004, + serialized_end=3463, ) @@ -1204,8 +1224,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3402, - serialized_end=3459, + serialized_start=3465, + serialized_end=3522, ) @@ -1261,8 +1281,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3461, - serialized_end=3511, + 
serialized_start=3524, + serialized_end=3574, ) @@ -1336,8 +1356,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3514, - serialized_end=3808, + serialized_start=3577, + serialized_end=3871, ) @@ -1429,8 +1449,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3811, - serialized_end=3947, + serialized_start=3874, + serialized_end=4010, ) @@ -1486,8 +1506,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3949, - serialized_end=4073, + serialized_start=4012, + serialized_end=4136, ) @@ -1543,8 +1563,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4075, - serialized_end=4174, + serialized_start=4138, + serialized_end=4237, ) @@ -1600,8 +1620,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4176, - serialized_end=4252, + serialized_start=4239, + serialized_end=4315, ) @@ -1675,8 +1695,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4254, - serialized_end=4340, + serialized_start=4317, + serialized_end=4403, ) @@ -1750,8 +1770,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4343, - serialized_end=4595, + serialized_start=4406, + serialized_end=4658, ) @@ -1789,8 +1809,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4597, - serialized_end=4647, + serialized_start=4660, + serialized_end=4710, ) @@ -1864,8 +1884,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4650, - serialized_end=4832, + serialized_start=4713, + serialized_end=4895, ) @@ -1903,8 +1923,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4834, - serialized_end=4878, + serialized_start=4897, + serialized_end=4941, ) _JOBQUERY.fields_by_name["location_filters"].message_type = _LOCATIONFILTER @@ -2070,6 +2090,15 @@ The query string that matches against the job title, description, and location fields. The maximum number of allowed characters is 255. 
+ query_language_code: + The language code of + [query][google.cloud.talent.v4beta1.JobQuery.query]. For + example, "en-US". This field helps to better interpret the + query. If a value isn't specified, the query language code is + automatically detected, which may not be accurate. Language + code should be in BCP-47 format, such as "en-US" or "sr-Latn". + For more information, see `Tags for Identifying Languages + `__. companies: This filter specifies the company entities to search against. If a value isn't specified, jobs are searched for against all @@ -2197,26 +2226,26 @@ .address] and [LocationFilter.lat\_lng][google.cloud.talent.v4 beta1.LocationFilter.lat\_lng] are provided, an error is thrown. The following logic is used to determine which - locations in the profile to filter against: 1. All of the - profile's geocoded [Profile.addresses][google.cloud.talent.v4b - eta1.Profile.addresses] where + locations in the profile to filter against: 1. All of the + profile's geocoded [Profile.addresses][google.cloud.talent. + v4beta1.Profile.addresses] where [Address.usage][google.cloud.talent.v4beta1.Address.usage] is PERSONAL and [Address.current][google.cloud.talent.v4beta1.Address.current] - is true. 2. If the above set of locations is empty, all of the - profile's geocoded [Profile.addresses][google.cloud.talent.v4b - eta1.Profile.addresses] where + is true. 2. If the above set of locations is empty, all of + the profile's geocoded [Profile.addresses][google.cloud.tal + ent.v4beta1.Profile.addresses] where [Address.usage][google.cloud.talent.v4beta1.Address.usage] is CONTACT\_INFO\_USAGE\_UNSPECIFIED and [Address.current][google.cloud.talent.v4beta1.Address.current] - is true. 3. If the above set of locations is empty, all of the - profile's geocoded [Profile.addresses][google.cloud.talent.v4b - eta1.Profile.addresses] where + is true. 3. 
If the above set of locations is empty, all of + the profile's geocoded [Profile.addresses][google.cloud.tal + ent.v4beta1.Profile.addresses] where [Address.usage][google.cloud.talent.v4beta1.Address.usage] is PERSONAL or CONTACT\_INFO\_USAGE\_UNSPECIFIED and [Address.current][google.cloud.talent.v4beta1.Address.current] - is not set. This means that any profiles without any [Profile - .addresses][google.cloud.talent.v4beta1.Profile.addresses] + is not set. This means that any profiles without any [Prof + ile.addresses][google.cloud.talent.v4beta1.Profile.addresses] that match any of the above criteria will not be included in a search with location filter. Furthermore, any [Profile.address es][google.cloud.talent.v4beta1.Profile.addresses] where diff --git a/talent/setup.py b/talent/setup.py index 0476b9315445..1889a4a69be7 100644 --- a/talent/setup.py +++ b/talent/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-talent" description = "Google Cloud Talent Solution API client library" -version = "0.3.0" +version = "0.4.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/talent/synth.metadata b/talent/synth.metadata index f125e93c1ac5..f17be2929a12 100644 --- a/talent/synth.metadata +++ b/talent/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-09-20T12:33:12.721312Z", + "updateTime": "2019-10-30T12:33:54.698582Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.3", - "dockerImage": "googleapis/artman@sha256:66ca01f27ef7dc50fbfb7743b67028115a6a8acf43b2d82f9fc826de008adac4" + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "2e02174cad304dac67a40e1f63885964c4db91b2", - "internalRef": "270249401" + "sha": "7c4cf35d5fe3b8ad664bd219edd6d9f28a788b64", + "internalRef": "277334937" } }, { "template": { "name": 
"python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/tasks/docs/conf.py b/tasks/docs/conf.py index 67f270e847d1..3435ea280162 100644 --- a/tasks/docs/conf.py +++ b/tasks/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py b/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py index 078af3347965..7c530a809640 100644 --- a/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py +++ b/tasks/google/cloud/tasks_v2/gapic/cloud_tasks_client.py @@ -285,13 +285,13 @@ def list_queues( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -371,13 +371,13 @@ def get_queue( name (str): Required. 
The resource name of the queue. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -464,13 +464,13 @@ def create_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2.types.Queue` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. 
@@ -563,13 +563,13 @@ def update_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -646,13 +646,13 @@ def delete_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -718,13 +718,13 @@ def purge_queue( name (str): Required. The queue name. 
For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -792,13 +792,13 @@ def pause_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -871,13 +871,13 @@ def resume_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Queue` instance. @@ -954,13 +954,13 @@ def get_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Policy` instance. @@ -1047,13 +1047,13 @@ def set_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. 
If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Policy` instance. @@ -1131,13 +1131,13 @@ def test_iam_permissions( information see `IAM Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.TestIamPermissionsResponse` instance. @@ -1240,13 +1240,13 @@ def list_tasks( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -1338,13 +1338,13 @@ def get_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Task` instance. @@ -1463,13 +1463,13 @@ def create_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
+ that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Task` instance. @@ -1539,13 +1539,13 @@ def delete_task( name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1638,13 +1638,13 @@ def run_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2.types.Task` instance. 
diff --git a/tasks/google/cloud/tasks_v2/proto/cloudtasks.proto b/tasks/google/cloud/tasks_v2/proto/cloudtasks.proto index f948426a3655..d30aae519a88 100644 --- a/tasks/google/cloud/tasks_v2/proto/cloudtasks.proto +++ b/tasks/google/cloud/tasks_v2/proto/cloudtasks.proto @@ -311,7 +311,7 @@ message ListQueuesRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -388,7 +388,7 @@ message CreateQueueRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -470,7 +470,7 @@ message ListTasksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; @@ -560,7 +560,7 @@ message CreateTaskRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; diff --git a/tasks/google/cloud/tasks_v2/proto/cloudtasks_pb2.py b/tasks/google/cloud/tasks_v2/proto/cloudtasks_pb2.py index 55beb9139d88..0ced965d5056 100644 --- a/tasks/google/cloud/tasks_v2/proto/cloudtasks_pb2.py +++ b/tasks/google/cloud/tasks_v2/proto/cloudtasks_pb2.py @@ -39,7 +39,7 @@ "\n\031com.google.cloud.tasks.v2B\017CloudTasksProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\242\002\005TASKS" ), serialized_pb=_b( - 
'\n,google/cloud/tasks_v2/proto/cloudtasks.proto\x12\x15google.cloud.tasks.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\'google/cloud/tasks_v2/proto/queue.proto\x1a&google/cloud/tasks_v2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x85\x01\n\x11ListQueuesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"[\n\x12ListQueuesResponse\x12,\n\x06queues\x18\x01 \x03(\x0b\x32\x1c.google.cloud.tasks.v2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x81\x01\n\x12\x43reateQueueRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x30\n\x05queue\x18\x02 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02"w\n\x12UpdateQueueRequest\x12\x30\n\x05queue\x18\x01 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xab\x01\n\x10ListTasksRequest\x12\x37\n\x06parent\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"X\n\x11ListTasksResponse\x12*\n\x05tasks\x18\x01 \x03(\x0b\x32\x1b.google.cloud.tasks.v2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x7f\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"\xb5\x01\n\x11\x43reateTaskRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12.\n\x04task\x18\x02 \x01(\x0b\x32\x1b.google.cloud.tasks.v2.TaskB\x03\xe0\x41\x02\x12\x37\n\rresponse_view\x18\x03 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x7f\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View2\xdd\x14\n\nCloudTasks\x12\x9e\x01\n\nListQueues\x12(.google.cloud.tasks.v2.ListQueuesRequest\x1a).google.cloud.tasks.v2.ListQueuesResponse";\x82\xd3\xe4\x93\x02,\x12*/v2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x8b\x01\n\x08GetQueue\x12&.google.cloud.tasks.v2.GetQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"9\x82\xd3\xe4\x93\x02,\x12*/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa0\x01\n\x0b\x43reateQueue\x12).google.cloud.tasks.v2.CreateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"H\x82\xd3\xe4\x93\x02\x33"*/v2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xab\x01\n\x0bUpdateQueue\x12).google.cloud.tasks.v2.UpdateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"S\x82\xd3\xe4\x93\x02\x39\x32\x30/v2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x8b\x01\n\x0b\x44\x65leteQueue\x12).google.cloud.tasks.v2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty"9\x82\xd3\xe4\x93\x02,**/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\x98\x01\n\nPurgeQueue\x12(.google.cloud.tasks.v2.PurgeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\x98\x01\n\nPauseQueue\x12(.google.cloud.tasks.v2.PauseQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\x9b\x01\n\x0bResumeQueue\x12).google.cloud.tasks.v2.ResumeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"C\x82\xd3\xe4\x93\x02\x36"1/v2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\x9c\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa3\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15
.google.iam.v1.Policy"X\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xce\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"c\x82\xd3\xe4\x93\x02\x46"A/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xa3\x01\n\tListTasks\x12\'.google.cloud.tasks.v2.ListTasksRequest\x1a(.google.cloud.tasks.v2.ListTasksResponse"C\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x90\x01\n\x07GetTask\x12%.google.cloud.tasks.v2.GetTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"A\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa0\x01\n\nCreateTask\x12(.google.cloud.tasks.v2.CreateTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"K\x82\xd3\xe4\x93\x02\x37"2/v2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x91\x01\n\nDeleteTask\x12(.google.cloud.tasks.v2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02\x34*2/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\x97\x01\n\x07RunTask\x12%.google.cloud.tasks.v2.RunTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"H\x82\xd3\xe4\x93\x02;"6/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x19\x63om.google.cloud.tasks.v2B\x0f\x43loudTasksProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\xa2\x02\x05TASKSb\x06proto3' + 
'\n,google/cloud/tasks_v2/proto/cloudtasks.proto\x12\x15google.cloud.tasks.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\'google/cloud/tasks_v2/proto/queue.proto\x1a&google/cloud/tasks_v2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"[\n\x12ListQueuesResponse\x12,\n\x06queues\x18\x01 \x03(\x0b\x32\x1c.google.cloud.tasks.v2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x7f\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x30\n\x05queue\x18\x02 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02"w\n\x12UpdateQueueRequest\x12\x30\n\x05queue\x18\x01 \x01(\x0b\x32\x1c.google.cloud.tasks.v2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaa\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 
\x12\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"X\n\x11ListTasksResponse\x12*\n\x05tasks\x18\x01 \x03(\x0b\x32\x1b.google.cloud.tasks.v2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x7f\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"\xb4\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12.\n\x04task\x18\x02 \x01(\x0b\x32\x1b.google.cloud.tasks.v2.TaskB\x03\xe0\x41\x02\x12\x37\n\rresponse_view\x18\x03 \x01(\x0e\x32 .google.cloud.tasks.v2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x7f\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x37\n\rresponse_view\x18\x02 \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View2\xdd\x14\n\nCloudTasks\x12\x9e\x01\n\nListQueues\x12(.google.cloud.tasks.v2.ListQueuesRequest\x1a).google.cloud.tasks.v2.ListQueuesResponse";\x82\xd3\xe4\x93\x02,\x12*/v2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x8b\x01\n\x08GetQueue\x12&.google.cloud.tasks.v2.GetQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"9\x82\xd3\xe4\x93\x02,\x12*/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa0\x01\n\x0b\x43reateQueue\x12).google.cloud.tasks.v2.CreateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"H\x82\xd3\xe4\x93\x02\x33"*/v2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xab\x01\n\x0bUpdateQueue\x12).google.cloud.tasks.v2.UpdateQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"S\x82\xd3\xe4\x93\x02\x39\x32\x30/v2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x8b\x01\n\x0b\x44\x65leteQueue\x12).google.cloud.tasks.v2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty"9\x82\xd3\xe4\x93\x02,**/v2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\x98\x01\n\nPurgeQueue\x12(.google.cloud.tasks.v2.PurgeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\x98\x01\n\nPauseQueue\x12(.google.cloud.tasks.v2.PauseQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"B\x82\xd3\xe4\x93\x02\x35"0/v2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\x9b\x01\n\x0bResumeQueue\x12).google.cloud.tasks.v2.ResumeQueueRequest\x1a\x1c.google.cloud.tasks.v2.Queue"C\x82\xd3\xe4\x93\x02\x36"1/v2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\x9c\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"Q\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa3\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15
.google.iam.v1.Policy"X\x82\xd3\xe4\x93\x02@";/v2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xce\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"c\x82\xd3\xe4\x93\x02\x46"A/v2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xa3\x01\n\tListTasks\x12\'.google.cloud.tasks.v2.ListTasksRequest\x1a(.google.cloud.tasks.v2.ListTasksResponse"C\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x90\x01\n\x07GetTask\x12%.google.cloud.tasks.v2.GetTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"A\x82\xd3\xe4\x93\x02\x34\x12\x32/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa0\x01\n\nCreateTask\x12(.google.cloud.tasks.v2.CreateTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"K\x82\xd3\xe4\x93\x02\x37"2/v2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x91\x01\n\nDeleteTask\x12(.google.cloud.tasks.v2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"A\x82\xd3\xe4\x93\x02\x34*2/v2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\x97\x01\n\x07RunTask\x12%.google.cloud.tasks.v2.RunTaskRequest\x1a\x1b.google.cloud.tasks.v2.Task"H\x82\xd3\xe4\x93\x02;"6/v2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformBr\n\x19\x63om.google.cloud.tasks.v2B\x0f\x43loudTasksProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks\xa2\x02\x05TASKSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -79,7 +79,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -147,7 +147,7 @@ 
extension_ranges=[], oneofs=[], serialized_start=391, - serialized_end=524, + serialized_end=522, ) @@ -203,8 +203,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=526, - serialized_end=617, + serialized_start=524, + serialized_end=615, ) @@ -244,8 +244,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=619, - serialized_end=691, + serialized_start=617, + serialized_end=689, ) @@ -272,7 +272,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -303,8 +303,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=694, - serialized_end=823, + serialized_start=691, + serialized_end=818, ) @@ -360,8 +360,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=825, - serialized_end=944, + serialized_start=820, + serialized_end=939, ) @@ -401,8 +401,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=946, - serialized_end=1021, + serialized_start=941, + serialized_end=1016, ) @@ -442,8 +442,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1023, - serialized_end=1097, + serialized_start=1018, + serialized_end=1092, ) @@ -483,8 +483,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1099, - serialized_end=1173, + serialized_start=1094, + serialized_end=1168, ) @@ -524,8 +524,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1175, - serialized_end=1250, + serialized_start=1170, + serialized_end=1245, ) @@ -552,7 +552,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -619,8 +619,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1253, - serialized_end=1424, + 
serialized_start=1248, + serialized_end=1418, ) @@ -676,8 +676,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1426, - serialized_end=1514, + serialized_start=1420, + serialized_end=1508, ) @@ -735,8 +735,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1516, - serialized_end=1643, + serialized_start=1510, + serialized_end=1637, ) @@ -763,7 +763,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -812,8 +812,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1646, - serialized_end=1827, + serialized_start=1640, + serialized_end=1820, ) @@ -853,8 +853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1829, - serialized_end=1902, + serialized_start=1822, + serialized_end=1895, ) @@ -912,8 +912,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1904, - serialized_end=2031, + serialized_start=1897, + serialized_end=2024, ) _LISTQUEUESRESPONSE.fields_by_name[ @@ -1441,8 +1441,8 @@ serialized_options=_b( "\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=2034, - serialized_end=4687, + serialized_start=2027, + serialized_end=4680, methods=[ _descriptor.MethodDescriptor( name="ListQueues", diff --git a/tasks/google/cloud/tasks_v2/proto/queue.proto b/tasks/google/cloud/tasks_v2/proto/queue.proto index ce6a90244d7b..b50f5aec8b91 100644 --- a/tasks/google/cloud/tasks_v2/proto/queue.proto +++ b/tasks/google/cloud/tasks_v2/proto/queue.proto @@ -99,6 +99,7 @@ message Queue { // [task-level app_engine_routing][google.cloud.tasks.v2.AppEngineHttpRequest.app_engine_routing]. // These settings apply only to // [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this queue. 
+ // [Http tasks][google.cloud.tasks.v2.HttpRequest] are not affected. // // If set, `app_engine_routing_override` is used for all // [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in the queue, no matter what the diff --git a/tasks/google/cloud/tasks_v2/proto/queue_pb2.py b/tasks/google/cloud/tasks_v2/proto/queue_pb2.py index e7ae7d7e2b16..282827c52b09 100644 --- a/tasks/google/cloud/tasks_v2/proto/queue_pb2.py +++ b/tasks/google/cloud/tasks_v2/proto/queue_pb2.py @@ -452,7 +452,8 @@ asks.v2.AppEngineHttpRequest.app\_engine\_routing]. These settings apply only to [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in this - queue. If set, ``app_engine_routing_override`` is used for + queue. [Http tasks][google.cloud.tasks.v2.HttpRequest] are not + affected. If set, ``app_engine_routing_override`` is used for all [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest] in the queue, no matter what the setting is for the [task-level app\_ diff --git a/tasks/google/cloud/tasks_v2/proto/target.proto b/tasks/google/cloud/tasks_v2/proto/target.proto index acd38fedee3a..04a88ce9ded0 100644 --- a/tasks/google/cloud/tasks_v2/proto/target.proto +++ b/tasks/google/cloud/tasks_v2/proto/target.proto @@ -17,6 +17,7 @@ syntax = "proto3"; package google.cloud.tasks.v2; +import "google/api/field_behavior.proto"; import "google/api/annotations.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks"; @@ -24,6 +25,112 @@ option java_multiple_files = true; option java_outer_classname = "TargetProto"; option java_package = "com.google.cloud.tasks.v2"; +// HTTP request. +// +// The task will be pushed to the worker as an HTTP request. If the worker +// or the redirected worker acknowledges the task by returning a successful HTTP +// response code ([`200` - `299`]), the task will removed from the queue. 
If +// any other HTTP response code is returned or no response is received, the +// task will be retried according to the following: +// +// * User-specified throttling: [retry configuration][google.cloud.tasks.v2.Queue.retry_config], +// [rate limits][google.cloud.tasks.v2.Queue.rate_limits], and the [queue's state][google.cloud.tasks.v2.Queue.state]. +// +// * System throttling: To prevent the worker from overloading, Cloud Tasks may +// temporarily reduce the queue's effective rate. User-specified settings +// will not be changed. +// +// System throttling happens because: +// +// * Cloud Tasks backs off on all errors. Normally the backoff specified in +// [rate limits][google.cloud.tasks.v2.Queue.rate_limits] will be used. But if the worker returns +// `429` (Too Many Requests), `503` (Service Unavailable), or the rate of +// errors is high, Cloud Tasks will use a higher backoff rate. The retry +// specified in the `Retry-After` HTTP response header is considered. +// +// * To prevent traffic spikes and to smooth sudden large traffic spikes, +// dispatches ramp up slowly when the queue is newly created or idle and +// if large numbers of tasks suddenly become available to dispatch (due to +// spikes in create task rates, the queue being unpaused, or many tasks +// that are scheduled at the same time). +message HttpRequest { + // Required. The full url path that the request will be sent to. + // + // This string must begin with either "http://" or "https://". Some examples + // are: `http://acme.com` and `https://acme.com/sales:8080`. Cloud Tasks will + // encode some characters for safety and compatibility. The maximum allowed + // URL length is 2083 characters after encoding. + // + // The `Location` header response from a redirect response [`300` - `399`] + // may be followed. The redirect is not counted as a separate attempt. + string url = 1 [(google.api.field_behavior) = REQUIRED]; + + // The HTTP method to use for the request. The default is POST. 
+ HttpMethod http_method = 2; + + // HTTP request headers. + // + // This map contains the header field names and values. + // Headers can be set when the + // [task is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // + // These headers represent a subset of the headers that will accompany the + // task's HTTP request. Some HTTP request headers will be ignored or replaced. + // + // A partial list of headers that will be ignored or replaced is: + // + // * Host: This will be computed by Cloud Tasks and derived from + // [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. + // * Content-Length: This will be computed by Cloud Tasks. + // * User-Agent: This will be set to `"Google-Cloud-Tasks"`. + // * X-Google-*: Google use only. + // * X-AppEngine-*: Google use only. + // + // `Content-Type` won't be set by Cloud Tasks. You can explicitly set + // `Content-Type` to a media type when the + // [task is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + // For example, `Content-Type` can be set to `"application/octet-stream"` or + // `"application/json"`. + // + // Headers which can have multiple values (according to RFC2616) can be + // specified using comma-separated values. + // + // The size of the headers must be less than 80KB. + map headers = 3; + + // HTTP request body. + // + // A request body is allowed only if the + // [HTTP method][google.cloud.tasks.v2.HttpRequest.http_method] is POST, PUT, or PATCH. It is an + // error to set body on a task with an incompatible [HttpMethod][google.cloud.tasks.v2.HttpMethod]. + bytes body = 4; + + // The mode for generating an `Authorization` header for HTTP requests. + // + // If specified, all `Authorization` headers in the [HttpRequest.headers][google.cloud.tasks.v2.HttpRequest.headers] + // field will be overridden. 
+ oneof authorization_header { + // If specified, an + // [OAuth token](https://developers.google.com/identity/protocols/OAuth2) + // will be generated and attached as an `Authorization` header in the HTTP + // request. + // + // This type of authorization should generally only be used when calling + // Google APIs hosted on *.googleapis.com. + OAuthToken oauth_token = 5; + + // If specified, an + // [OIDC](https://developers.google.com/identity/protocols/OpenIDConnect) + // token will be generated and attached as an `Authorization` header in the + // HTTP request. + // + // This type of authorization can be used for many scenarios, including + // calling Cloud Run, or endpoints where you intend to validate the token + // yourself. + OidcToken oidc_token = 6; + } +} + // App Engine HTTP request. // // The message defines the HTTP request that is sent to an App Engine app when @@ -278,3 +385,40 @@ enum HttpMethod { // HTTP OPTIONS OPTIONS = 7; } + +// Contains information needed for generating an +// [OAuth token](https://developers.google.com/identity/protocols/OAuth2). +// This type of authorization should generally only be used when calling Google +// APIs hosted on *.googleapis.com. +message OAuthToken { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OAuth token. + // The service account must be within the same project as the queue. The + // caller must have iam.serviceAccounts.actAs permission for the service + // account. + string service_account_email = 1; + + // OAuth scope to be used for generating OAuth access token. + // If not specified, "https://www.googleapis.com/auth/cloud-platform" + // will be used. + string scope = 2; +} + +// Contains information needed for generating an +// [OpenID Connect +// token](https://developers.google.com/identity/protocols/OpenIDConnect). 
+// This type of authorization can be used for many scenarios, including +// calling Cloud Run, or endpoints where you intend to validate the token +// yourself. +message OidcToken { + // [Service account email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating OIDC token. + // The service account must be within the same project as the queue. The + // caller must have iam.serviceAccounts.actAs permission for the service + // account. + string service_account_email = 1; + + // Audience to be used when generating OIDC token. If not specified, the URI + // specified in target will be used. + string audience = 2; +} diff --git a/tasks/google/cloud/tasks_v2/proto/target_pb2.py b/tasks/google/cloud/tasks_v2/proto/target_pb2.py index 0203f5333672..2f894cc62d2f 100644 --- a/tasks/google/cloud/tasks_v2/proto/target_pb2.py +++ b/tasks/google/cloud/tasks_v2/proto/target_pb2.py @@ -16,6 +16,7 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 @@ -27,9 +28,12 @@ "\n\031com.google.cloud.tasks.v2B\013TargetProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks" ), serialized_pb=_b( - '\n(google/cloud/tasks_v2/proto/target.proto\x12\x15google.cloud.tasks.v2\x1a\x1cgoogle/api/annotations.proto"\xb2\x02\n\x14\x41ppEngineHttpRequest\x12\x36\n\x0bhttp_method\x18\x01 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12\x43\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\'.google.cloud.tasks.v2.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12I\n\x07headers\x18\x04 \x03(\x0b\x32\x38.google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 
\x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\x66\n\x19\x63om.google.cloud.tasks.v2B\x0bTargetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3' + '\n(google/cloud/tasks_v2/proto/target.proto\x12\x15google.cloud.tasks.v2\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1cgoogle/api/annotations.proto"\xe1\x02\n\x0bHttpRequest\x12\x10\n\x03url\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x36\n\x0bhttp_method\x18\x02 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12@\n\x07headers\x18\x03 \x03(\x0b\x32/.google.cloud.tasks.v2.HttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x04 \x01(\x0c\x12\x38\n\x0boauth_token\x18\x05 \x01(\x0b\x32!.google.cloud.tasks.v2.OAuthTokenH\x00\x12\x36\n\noidc_token\x18\x06 \x01(\x0b\x32 .google.cloud.tasks.v2.OidcTokenH\x00\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x16\n\x14\x61uthorization_header"\xb2\x02\n\x14\x41ppEngineHttpRequest\x12\x36\n\x0bhttp_method\x18\x01 \x01(\x0e\x32!.google.cloud.tasks.v2.HttpMethod\x12\x43\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32\'.google.cloud.tasks.v2.AppEngineRouting\x12\x14\n\x0crelative_uri\x18\x03 \x01(\t\x12I\n\x07headers\x18\x04 \x03(\x0b\x32\x38.google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry\x12\x0c\n\x04\x62ody\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t":\n\nOAuthToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\r\n\x05scope\x18\x02 
\x01(\t"<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t*s\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x12\t\n\x05PATCH\x10\x06\x12\x0b\n\x07OPTIONS\x10\x07\x42\x66\n\x19\x63om.google.cloud.tasks.v2B\x0bTargetProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], + dependencies=[ + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + ], ) _HTTPMETHOD = _descriptor.EnumDescriptor( @@ -69,8 +73,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=492, - serialized_end=607, + serialized_start=1003, + serialized_end=1118, ) _sym_db.RegisterEnumDescriptor(_HTTPMETHOD) @@ -85,6 +89,199 @@ OPTIONS = 7 +_HTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor( + name="HeadersEntry", + full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.tasks.v2.HttpRequest.HeadersEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + 
extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=414, + serialized_end=460, +) + +_HTTPREQUEST = _descriptor.Descriptor( + name="HttpRequest", + full_name="google.cloud.tasks.v2.HttpRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="url", + full_name="google.cloud.tasks.v2.HttpRequest.url", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="http_method", + full_name="google.cloud.tasks.v2.HttpRequest.http_method", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="headers", + full_name="google.cloud.tasks.v2.HttpRequest.headers", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="body", + full_name="google.cloud.tasks.v2.HttpRequest.body", + index=3, + number=4, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="oauth_token", + 
full_name="google.cloud.tasks.v2.HttpRequest.oauth_token", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="oidc_token", + full_name="google.cloud.tasks.v2.HttpRequest.oidc_token", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_HTTPREQUEST_HEADERSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="authorization_header", + full_name="google.cloud.tasks.v2.HttpRequest.authorization_header", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=131, + serialized_end=484, +) + + _APPENGINEHTTPREQUEST_HEADERSENTRY = _descriptor.Descriptor( name="HeadersEntry", full_name="google.cloud.tasks.v2.AppEngineHttpRequest.HeadersEntry", @@ -137,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=358, - serialized_end=404, + serialized_start=414, + serialized_end=460, ) _APPENGINEHTTPREQUEST = _descriptor.Descriptor( @@ -247,8 +444,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=98, - serialized_end=404, + serialized_start=487, + serialized_end=793, ) @@ -340,10 +537,141 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=406, - serialized_end=490, + serialized_start=795, + serialized_end=879, +) + + +_OAUTHTOKEN = _descriptor.Descriptor( + name="OAuthToken", + full_name="google.cloud.tasks.v2.OAuthToken", + filename=None, + file=DESCRIPTOR, + containing_type=None, 
+ fields=[ + _descriptor.FieldDescriptor( + name="service_account_email", + full_name="google.cloud.tasks.v2.OAuthToken.service_account_email", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="scope", + full_name="google.cloud.tasks.v2.OAuthToken.scope", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=881, + serialized_end=939, +) + + +_OIDCTOKEN = _descriptor.Descriptor( + name="OidcToken", + full_name="google.cloud.tasks.v2.OidcToken", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="service_account_email", + full_name="google.cloud.tasks.v2.OidcToken.service_account_email", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="audience", + full_name="google.cloud.tasks.v2.OidcToken.audience", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=941, + serialized_end=1001, ) +_HTTPREQUEST_HEADERSENTRY.containing_type = _HTTPREQUEST +_HTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD +_HTTPREQUEST.fields_by_name["headers"].message_type = _HTTPREQUEST_HEADERSENTRY +_HTTPREQUEST.fields_by_name["oauth_token"].message_type = _OAUTHTOKEN +_HTTPREQUEST.fields_by_name["oidc_token"].message_type = _OIDCTOKEN +_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append( + _HTTPREQUEST.fields_by_name["oauth_token"] +) +_HTTPREQUEST.fields_by_name[ + "oauth_token" +].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"] +_HTTPREQUEST.oneofs_by_name["authorization_header"].fields.append( + _HTTPREQUEST.fields_by_name["oidc_token"] +) +_HTTPREQUEST.fields_by_name[ + "oidc_token" +].containing_oneof = _HTTPREQUEST.oneofs_by_name["authorization_header"] _APPENGINEHTTPREQUEST_HEADERSENTRY.containing_type = _APPENGINEHTTPREQUEST _APPENGINEHTTPREQUEST.fields_by_name["http_method"].enum_type = _HTTPMETHOD _APPENGINEHTTPREQUEST.fields_by_name[ @@ -352,11 +680,128 @@ _APPENGINEHTTPREQUEST.fields_by_name[ "headers" ].message_type = _APPENGINEHTTPREQUEST_HEADERSENTRY +DESCRIPTOR.message_types_by_name["HttpRequest"] = _HTTPREQUEST DESCRIPTOR.message_types_by_name["AppEngineHttpRequest"] = _APPENGINEHTTPREQUEST DESCRIPTOR.message_types_by_name["AppEngineRouting"] = _APPENGINEROUTING +DESCRIPTOR.message_types_by_name["OAuthToken"] = _OAUTHTOKEN +DESCRIPTOR.message_types_by_name["OidcToken"] = _OIDCTOKEN DESCRIPTOR.enum_types_by_name["HttpMethod"] = _HTTPMETHOD _sym_db.RegisterFileDescriptor(DESCRIPTOR) +HttpRequest = _reflection.GeneratedProtocolMessageType( + "HttpRequest", + (_message.Message,), + dict( + HeadersEntry=_reflection.GeneratedProtocolMessageType( + "HeadersEntry", + 
(_message.Message,), + dict( + DESCRIPTOR=_HTTPREQUEST_HEADERSENTRY, + __module__="google.cloud.tasks_v2.proto.target_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.HttpRequest.HeadersEntry) + ), + ), + DESCRIPTOR=_HTTPREQUEST, + __module__="google.cloud.tasks_v2.proto.target_pb2", + __doc__="""HTTP request. + + The task will be pushed to the worker as an HTTP request. If the worker + or the redirected worker acknowledges the task by returning a successful + HTTP response code ([``200`` - ``299``]), the task will removed from the + queue. If any other HTTP response code is returned or no response is + received, the task will be retried according to the following: + + - User-specified throttling: [retry + configuration][google.cloud.tasks.v2.Queue.retry\_config], [rate + limits][google.cloud.tasks.v2.Queue.rate\_limits], and the [queue's + state][google.cloud.tasks.v2.Queue.state]. + + - System throttling: To prevent the worker from overloading, Cloud + Tasks may temporarily reduce the queue's effective rate. + User-specified settings will not be changed. + + System throttling happens because: + + - Cloud Tasks backs off on all errors. Normally the backoff specified + in [rate limits][google.cloud.tasks.v2.Queue.rate\_limits] will be + used. But if the worker returns ``429`` (Too Many Requests), ``503`` + (Service Unavailable), or the rate of errors is high, Cloud Tasks + will use a higher backoff rate. The retry specified in the + ``Retry-After`` HTTP response header is considered. + + - To prevent traffic spikes and to smooth sudden large traffic spikes, + dispatches ramp up slowly when the queue is newly created or idle and + if large numbers of tasks suddenly become available to dispatch (due + to spikes in create task rates, the queue being unpaused, or many + tasks that are scheduled at the same time). + + + Attributes: + url: + Required. The full url path that the request will be sent to. 
+ This string must begin with either "http://" or "https://". + Some examples are: ``http://acme.com`` and + ``https://acme.com/sales:8080``. Cloud Tasks will encode some + characters for safety and compatibility. The maximum allowed + URL length is 2083 characters after encoding. The + ``Location`` header response from a redirect response [``300`` + - ``399``] may be followed. The redirect is not counted as a + separate attempt. + http_method: + The HTTP method to use for the request. The default is POST. + headers: + HTTP request headers. This map contains the header field + names and values. Headers can be set when the [task is + created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + These headers represent a subset of the headers that will + accompany the task's HTTP request. Some HTTP request headers + will be ignored or replaced. A partial list of headers that + will be ignored or replaced is: - Host: This will be + computed by Cloud Tasks and derived from + [HttpRequest.url][google.cloud.tasks.v2.HttpRequest.url]. - + Content-Length: This will be computed by Cloud Tasks. - User- + Agent: This will be set to ``"Google-Cloud-Tasks"``. - + X-Google-\*: Google use only. - X-AppEngine-\*: Google use + only. ``Content-Type`` won't be set by Cloud Tasks. You can + explicitly set ``Content-Type`` to a media type when the [task + is created][google.cloud.tasks.v2beta3.CloudTasks.CreateTask]. + For example, ``Content-Type`` can be set to + ``"application/octet-stream"`` or ``"application/json"``. + Headers which can have multiple values (according to RFC2616) + can be specified using comma-separated values. The size of + the headers must be less than 80KB. + body: + HTTP request body. A request body is allowed only if the + [HTTP method][google.cloud.tasks.v2.HttpRequest.http\_method] + is POST, PUT, or PATCH. It is an error to set body on a task + with an incompatible + [HttpMethod][google.cloud.tasks.v2.HttpMethod]. 
+ authorization_header: + The mode for generating an ``Authorization`` header for HTTP + requests. If specified, all ``Authorization`` headers in the + [HttpRequest.headers][google.cloud.tasks.v2.HttpRequest.header + s] field will be overridden. + oauth_token: + If specified, an `OAuth token + `_ + will be generated and attached as an ``Authorization`` header + in the HTTP request. This type of authorization should + generally only be used when calling Google APIs hosted on + \*.googleapis.com. + oidc_token: + If specified, an `OIDC `_ token will be generated and + attached as an ``Authorization`` header in the HTTP request. + This type of authorization can be used for many scenarios, + including calling Cloud Run, or endpoints where you intend to + validate the token yourself. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.HttpRequest) + ), +) +_sym_db.RegisterMessage(HttpRequest) +_sym_db.RegisterMessage(HttpRequest.HeadersEntry) + AppEngineHttpRequest = _reflection.GeneratedProtocolMessageType( "AppEngineHttpRequest", (_message.Message,), @@ -605,7 +1050,67 @@ ) _sym_db.RegisterMessage(AppEngineRouting) +OAuthToken = _reflection.GeneratedProtocolMessageType( + "OAuthToken", + (_message.Message,), + dict( + DESCRIPTOR=_OAUTHTOKEN, + __module__="google.cloud.tasks_v2.proto.target_pb2", + __doc__="""Contains information needed for generating an `OAuth + token `_. This + type of authorization should generally only be used when calling Google + APIs hosted on \*.googleapis.com. + + + Attributes: + service_account_email: + `Service account email + `_ to be + used for generating OAuth token. The service account must be + within the same project as the queue. The caller must have + iam.serviceAccounts.actAs permission for the service account. + scope: + OAuth scope to be used for generating OAuth access token. If + not specified, "https://www.googleapis.com/auth/cloud- + platform" will be used. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.OAuthToken) + ), +) +_sym_db.RegisterMessage(OAuthToken) + +OidcToken = _reflection.GeneratedProtocolMessageType( + "OidcToken", + (_message.Message,), + dict( + DESCRIPTOR=_OIDCTOKEN, + __module__="google.cloud.tasks_v2.proto.target_pb2", + __doc__="""Contains information needed for generating an `OpenID Connect + token `_. + This type of authorization can be used for many scenarios, including + calling Cloud Run, or endpoints where you intend to validate the token + yourself. + + + Attributes: + service_account_email: + `Service account email + `_ to be + used for generating OIDC token. The service account must be + within the same project as the queue. The caller must have + iam.serviceAccounts.actAs permission for the service account. + audience: + Audience to be used when generating OIDC token. If not + specified, the URI specified in target will be used. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2.OidcToken) + ), +) +_sym_db.RegisterMessage(OidcToken) + DESCRIPTOR._options = None +_HTTPREQUEST_HEADERSENTRY._options = None +_HTTPREQUEST.fields_by_name["url"]._options = None _APPENGINEHTTPREQUEST_HEADERSENTRY._options = None # @@protoc_insertion_point(module_scope) diff --git a/tasks/google/cloud/tasks_v2/proto/task.proto b/tasks/google/cloud/tasks_v2/proto/task.proto index a555c2c19689..70a2baba4c53 100644 --- a/tasks/google/cloud/tasks_v2/proto/task.proto +++ b/tasks/google/cloud/tasks_v2/proto/task.proto @@ -93,6 +93,11 @@ message Task { // // An App Engine task is a task that has [AppEngineHttpRequest][google.cloud.tasks.v2.AppEngineHttpRequest] set. AppEngineHttpRequest app_engine_http_request = 2; + + // HTTP request that is sent to the worker. + // + // An HTTP task is a task that has [HttpRequest][google.cloud.tasks.v2.HttpRequest] set. + HttpRequest http_request = 3; } // The time when the task is scheduled to be attempted or retried. 
@@ -117,6 +122,8 @@ message Task { // // The default and maximum values depend on the type of request: // + // * For [HTTP tasks][google.cloud.tasks.v2.HttpRequest], the default is 10 minutes. The deadline + // must be in the interval [15 seconds, 30 minutes]. // // * For [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 indicates that the // request has the default deadline. The default deadline depends on the diff --git a/tasks/google/cloud/tasks_v2/proto/task_pb2.py b/tasks/google/cloud/tasks_v2/proto/task_pb2.py index 996e33c624ad..b8d6ffb30ff3 100644 --- a/tasks/google/cloud/tasks_v2/proto/task_pb2.py +++ b/tasks/google/cloud/tasks_v2/proto/task_pb2.py @@ -33,7 +33,7 @@ "\n\031com.google.cloud.tasks.v2B\tTaskProtoP\001Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasks" ), serialized_pb=_b( - '\n&google/cloud/tasks_v2/proto/task.proto\x12\x15google.cloud.tasks.v2\x1a\x19google/api/resource.proto\x1a(google/cloud/tasks_v2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xf8\x04\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x17\x61pp_engine_http_request\x18\x02 \x01(\x0b\x32+.google.cloud.tasks.v2.AppEngineHttpRequestH\x00\x12\x31\n\rschedule_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x11\x64ispatch_deadline\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x16\n\x0e\x64ispatch_count\x18\x07 \x01(\x05\x12\x16\n\x0eresponse_count\x18\x08 \x01(\x05\x12\x35\n\rfirst_attempt\x18\t \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12\x34\n\x0clast_attempt\x18\n \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12.\n\x04view\x18\x0b \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02:h\xea\x41\x65\n\x1e\x63loudtasks.googleapis.com/Task\x12\x43projects/{project}/locations/{location}/queues/{queue}/tasks/{task}B\x0e\n\x0cmessage_type"\xcf\x01\n\x07\x41ttempt\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBd\n\x19\x63om.google.cloud.tasks.v2B\tTaskProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3' + '\n&google/cloud/tasks_v2/proto/task.proto\x12\x15google.cloud.tasks.v2\x1a\x19google/api/resource.proto\x1a(google/cloud/tasks_v2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x1cgoogle/api/annotations.proto"\xb4\x05\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12N\n\x17\x61pp_engine_http_request\x18\x02 \x01(\x0b\x32+.google.cloud.tasks.v2.AppEngineHttpRequestH\x00\x12:\n\x0chttp_request\x18\x03 \x01(\x0b\x32".google.cloud.tasks.v2.HttpRequestH\x00\x12\x31\n\rschedule_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x11\x64ispatch_deadline\x18\x06 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x16\n\x0e\x64ispatch_count\x18\x07 \x01(\x05\x12\x16\n\x0eresponse_count\x18\x08 \x01(\x05\x12\x35\n\rfirst_attempt\x18\t \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12\x34\n\x0clast_attempt\x18\n \x01(\x0b\x32\x1e.google.cloud.tasks.v2.Attempt\x12.\n\x04view\x18\x0b \x01(\x0e\x32 
.google.cloud.tasks.v2.Task.View"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02:h\xea\x41\x65\n\x1e\x63loudtasks.googleapis.com/Task\x12\x43projects/{project}/locations/{location}/queues/{queue}/tasks/{task}B\x0e\n\x0cmessage_type"\xcf\x01\n\x07\x41ttempt\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBd\n\x19\x63om.google.cloud.tasks.v2B\tTaskProtoP\x01Z:google.golang.org/genproto/googleapis/cloud/tasks/v2;tasksb\x06proto3' ), dependencies=[ google_dot_api_dot_resource__pb2.DESCRIPTOR, @@ -68,8 +68,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=716, - serialized_end=765, + serialized_start=776, + serialized_end=825, ) _sym_db.RegisterEnumDescriptor(_TASK_VIEW) @@ -117,10 +117,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="http_request", + full_name="google.cloud.tasks.v2.Task.http_request", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="schedule_time", full_name="google.cloud.tasks.v2.Task.schedule_time", - index=2, + index=3, number=4, type=11, cpp_type=10, @@ -138,7 +156,7 @@ _descriptor.FieldDescriptor( name="create_time", full_name="google.cloud.tasks.v2.Task.create_time", - index=3, + index=4, number=5, type=11, cpp_type=10, @@ -156,7 +174,7 @@ _descriptor.FieldDescriptor( name="dispatch_deadline", full_name="google.cloud.tasks.v2.Task.dispatch_deadline", - index=4, + index=5, number=6, type=11, cpp_type=10, @@ -174,7 +192,7 @@ 
_descriptor.FieldDescriptor( name="dispatch_count", full_name="google.cloud.tasks.v2.Task.dispatch_count", - index=5, + index=6, number=7, type=5, cpp_type=1, @@ -192,7 +210,7 @@ _descriptor.FieldDescriptor( name="response_count", full_name="google.cloud.tasks.v2.Task.response_count", - index=6, + index=7, number=8, type=5, cpp_type=1, @@ -210,7 +228,7 @@ _descriptor.FieldDescriptor( name="first_attempt", full_name="google.cloud.tasks.v2.Task.first_attempt", - index=7, + index=8, number=9, type=11, cpp_type=10, @@ -228,7 +246,7 @@ _descriptor.FieldDescriptor( name="last_attempt", full_name="google.cloud.tasks.v2.Task.last_attempt", - index=8, + index=9, number=10, type=11, cpp_type=10, @@ -246,7 +264,7 @@ _descriptor.FieldDescriptor( name="view", full_name="google.cloud.tasks.v2.Task.view", - index=9, + index=10, number=11, type=14, cpp_type=8, @@ -281,7 +299,7 @@ ) ], serialized_start=255, - serialized_end=887, + serialized_end=947, ) @@ -373,8 +391,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=890, - serialized_end=1097, + serialized_start=950, + serialized_end=1157, ) _TASK.fields_by_name[ @@ -382,6 +400,9 @@ ].message_type = ( google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._APPENGINEHTTPREQUEST ) +_TASK.fields_by_name[ + "http_request" +].message_type = google_dot_cloud_dot_tasks__v2_dot_proto_dot_target__pb2._HTTPREQUEST _TASK.fields_by_name[ "schedule_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -401,6 +422,10 @@ _TASK.fields_by_name["app_engine_http_request"].containing_oneof = _TASK.oneofs_by_name[ "message_type" ] +_TASK.oneofs_by_name["message_type"].fields.append(_TASK.fields_by_name["http_request"]) +_TASK.fields_by_name["http_request"].containing_oneof = _TASK.oneofs_by_name[ + "message_type" +] _ATTEMPT.fields_by_name[ "schedule_time" ].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -453,6 +478,10 @@ HTTP request that is sent to the App Engine app handler. 
An App Engine task is a task that has [AppEngineHttpRequest][goog le.cloud.tasks.v2.AppEngineHttpRequest] set. + http_request: + HTTP request that is sent to the worker. An HTTP task is a + task that has [HttpRequest][google.cloud.tasks.v2.HttpRequest] + set. schedule_time: The time when the task is scheduled to be attempted or retried. ``schedule_time`` will be truncated to the nearest @@ -470,7 +499,10 @@ for the response, but whether the worker stops processing depends on the worker. For example, if the worker is stuck, it may not react to cancelled requests. The default and maximum - values depend on the type of request: - For [App Engine + values depend on the type of request: - For [HTTP + tasks][google.cloud.tasks.v2.HttpRequest], the default is + 10 minutes. The deadline must be in the interval [15 seconds, + 30 minutes]. - For [App Engine tasks][google.cloud.tasks.v2.AppEngineHttpRequest], 0 indicates that the request has the default deadline. The default deadline depends on the `scaling type diff --git a/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py index 8c0b02f1a4cb..651a67a0c228 100644 --- a/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py +++ b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py @@ -287,13 +287,13 @@ def list_queues( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -373,13 +373,13 @@ def get_queue( name (str): Required. The resource name of the queue. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -466,13 +466,13 @@ def create_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Queue` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -565,13 +565,13 @@ def update_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -648,13 +648,13 @@ def delete_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
+ that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -720,13 +720,13 @@ def purge_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -794,13 +794,13 @@ def pause_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. 
@@ -873,13 +873,13 @@ def resume_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. @@ -956,13 +956,13 @@ def get_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Policy` instance. 
@@ -1049,13 +1049,13 @@ def set_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Policy` instance. @@ -1133,13 +1133,13 @@ def test_iam_permissions( information see `IAM Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.TestIamPermissionsResponse` instance. @@ -1242,13 +1242,13 @@ def list_tasks( streaming is performed per-page, this determines the maximum number of resources in a page. 
retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -1340,13 +1340,13 @@ def get_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. @@ -1466,13 +1466,13 @@ def create_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. 
If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. @@ -1542,13 +1542,13 @@ def delete_task( name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1693,13 +1693,13 @@ def lease_tasks( UTF-8 encoded can't be used in the ``filter`` and the task's ``tag`` will be displayed as empty in Cloud Tasks. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. 
timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.LeaseTasksResponse` instance. @@ -1788,13 +1788,13 @@ def acknowledge_task( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1895,13 +1895,13 @@ def renew_lease( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. @@ -1997,13 +1997,13 @@ def cancel_lease( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. @@ -2103,13 +2103,13 @@ def run_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. 
Returns: A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. diff --git a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks.proto b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks.proto index 79274fc37291..afea6919a625 100644 --- a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks.proto +++ b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks.proto @@ -396,7 +396,7 @@ message ListQueuesRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -473,7 +473,7 @@ message CreateQueueRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -555,7 +555,7 @@ message ListTasksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; @@ -645,7 +645,7 @@ message CreateTaskRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; @@ -719,7 +719,7 @@ message LeaseTasksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; diff --git a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py index b071f420cb7f..7d5555815a35 100644 --- a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py +++ b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py @@ -41,7 +41,7 @@ 
"\n\036com.google.cloud.tasks.v2beta2B\017CloudTasksProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\242\002\005TASKS" ), serialized_pb=_b( - '\n1google/cloud/tasks_v2beta2/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta2/proto/queue.proto\x1a+google/cloud/tasks_v2beta2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x85\x01\n\x11ListQueuesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x86\x01\n\x12\x43reateQueueRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xb0\x01\n\x10ListTasksRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xbf\x01\n\x11\x43reateTaskRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta2.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\xe5\x01\n\x11LeaseTasksRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x11\n\tmax_tasks\x18\x02 \x01(\x05\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t"E\n\x12LeaseTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task"\x86\x01\n\x16\x41\x63knowledgeTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"\xf7\x01\n\x11RenewLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xc0\x01\n\x12\x43\x61ncelLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View2\xd4\x1c\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta2.ListQueuesRequest\x1a..google.cloud.tasks.v2beta2.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta2.GetQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta2.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta2.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta2.DeleteQueueRequest\x1a\x16.google.pro
tobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta2.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta2.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta2.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta2.ListTasksRequest\x1a-.google.cloud.tasks.v2beta2.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta2.GetTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta2.CreateTaskRequest\x1a 
.google.cloud.tasks.v2beta2.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xcd\x01\n\nLeaseTasks\x12-.google.cloud.tasks.v2beta2.LeaseTasksRequest\x1a..google.cloud.tasks.v2beta2.LeaseTasksResponse"`\x82\xd3\xe4\x93\x02\x42"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\x01*\xda\x41\x15parent,lease_duration\x12\xc2\x01\n\x0f\x41\x63knowledgeTask\x12\x32.google.cloud.tasks.v2beta2.AcknowledgeTaskRequest\x1a\x16.google.protobuf.Empty"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\x01*\xda\x41\x12name,schedule_time\x12\xd0\x01\n\nRenewLease\x12-.google.cloud.tasks.v2beta2.RenewLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"q\x82\xd3\xe4\x93\x02G"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\x01*\xda\x41!name,schedule_time,lease_duration\x12\xc4\x01\n\x0b\x43\x61ncelLease\x12..google.cloud.tasks.v2beta2.CancelLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\x01*\xda\x41\x12name,schedule_time\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta2.RunTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"M\x82\xd3\xe4\x93\x02@";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\xa2\x02\x05TASKSb\x06proto3' + 
'\n1google/cloud/tasks_v2beta2/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta2/proto/queue.proto\x1a+google/cloud/tasks_v2beta2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x84\x01\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaf\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xbe\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta2.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\xe4\x01\n\x11LeaseTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x11\n\tmax_tasks\x18\x02 \x01(\x05\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t"E\n\x12LeaseTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task"\x86\x01\n\x16\x41\x63knowledgeTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02"\xf7\x01\n\x11RenewLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 
\n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12\x36\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\xc0\x01\n\x12\x43\x61ncelLeaseRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12\x36\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View2\xd4\x1c\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta2.ListQueuesRequest\x1a..google.cloud.tasks.v2beta2.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta2.GetQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta2.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta2/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta2.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta2/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.ta
sks.v2beta2.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta2.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta2.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta2.ListTasksRequest\x1a-.google.cloud.tasks.v2beta2.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta2.GetTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta2.CreateTaskRequest\x1a 
.google.cloud.tasks.v2beta2.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xcd\x01\n\nLeaseTasks\x12-.google.cloud.tasks.v2beta2.LeaseTasksRequest\x1a..google.cloud.tasks.v2beta2.LeaseTasksResponse"`\x82\xd3\xe4\x93\x02\x42"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\x01*\xda\x41\x15parent,lease_duration\x12\xc2\x01\n\x0f\x41\x63knowledgeTask\x12\x32.google.cloud.tasks.v2beta2.AcknowledgeTaskRequest\x1a\x16.google.protobuf.Empty"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\x01*\xda\x41\x12name,schedule_time\x12\xd0\x01\n\nRenewLease\x12-.google.cloud.tasks.v2beta2.RenewLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"q\x82\xd3\xe4\x93\x02G"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\x01*\xda\x41!name,schedule_time,lease_duration\x12\xc4\x01\n\x0b\x43\x61ncelLease\x12..google.cloud.tasks.v2beta2.CancelLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task"c\x82\xd3\xe4\x93\x02H"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\x01*\xda\x41\x12name,schedule_time\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta2.RunTaskRequest\x1a .google.cloud.tasks.v2beta2.Task"M\x82\xd3\xe4\x93\x02@";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks\xa2\x02\x05TASKSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -83,7 +83,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - 
"\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -151,7 +151,7 @@ extension_ranges=[], oneofs=[], serialized_start=476, - serialized_end=609, + serialized_end=607, ) @@ -207,8 +207,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=611, - serialized_end=707, + serialized_start=609, + serialized_end=705, ) @@ -248,8 +248,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=709, - serialized_end=781, + serialized_start=707, + serialized_end=779, ) @@ -276,7 +276,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -307,8 +307,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=784, - serialized_end=918, + serialized_start=782, + serialized_end=914, ) @@ -364,8 +364,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=920, - serialized_end=1044, + serialized_start=916, + serialized_end=1040, ) @@ -405,8 +405,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1046, - serialized_end=1121, + serialized_start=1042, + serialized_end=1117, ) @@ -446,8 +446,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1123, - serialized_end=1197, + serialized_start=1119, + serialized_end=1193, ) @@ -487,8 +487,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1199, - serialized_end=1273, + serialized_start=1195, + serialized_end=1269, ) @@ -528,8 +528,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1275, - serialized_end=1350, + serialized_start=1271, + serialized_end=1346, ) @@ -556,7 +556,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A 
\022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -623,8 +623,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1353, - serialized_end=1529, + serialized_start=1349, + serialized_end=1524, ) @@ -680,8 +680,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1531, - serialized_end=1624, + serialized_start=1526, + serialized_end=1619, ) @@ -739,8 +739,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1627, - serialized_end=1759, + serialized_start=1622, + serialized_end=1754, ) @@ -767,7 +767,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -816,8 +816,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1762, - serialized_end=1953, + serialized_start=1757, + serialized_end=1947, ) @@ -857,8 +857,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1955, - serialized_end=2028, + serialized_start=1949, + serialized_end=2022, ) @@ -885,7 +885,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -970,8 +970,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2031, - serialized_end=2260, + serialized_start=2025, + serialized_end=2253, ) @@ -1009,8 +1009,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2262, - serialized_end=2331, + serialized_start=2255, + serialized_end=2324, ) @@ -1068,8 +1068,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2334, - serialized_end=2468, + serialized_start=2327, + serialized_end=2461, ) @@ -1163,8 +1163,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2471, - serialized_end=2718, + serialized_start=2464, + 
serialized_end=2711, ) @@ -1240,8 +1240,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2721, - serialized_end=2913, + serialized_start=2714, + serialized_end=2906, ) @@ -1299,8 +1299,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2916, - serialized_end=3048, + serialized_start=2909, + serialized_end=3041, ) _LISTQUEUESRESPONSE.fields_by_name[ @@ -2092,8 +2092,8 @@ serialized_options=_b( "\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=3051, - serialized_end=6719, + serialized_start=3044, + serialized_end=6712, methods=[ _descriptor.MethodDescriptor( name="ListQueues", diff --git a/tasks/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py b/tasks/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py index 51d6d9ac4b21..b018df9b510e 100644 --- a/tasks/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py +++ b/tasks/google/cloud/tasks_v2beta3/gapic/cloud_tasks_client.py @@ -285,13 +285,13 @@ def list_queues( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -371,13 +371,13 @@ def get_queue( name (str): Required. The resource name of the queue. 
For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -473,13 +473,13 @@ def create_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.Queue` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. 
@@ -581,13 +581,13 @@ def update_queue( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -664,13 +664,13 @@ def delete_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -736,13 +736,13 @@ def purge_queue( name (str): Required. The queue name. 
For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -810,13 +810,13 @@ def pause_queue( name (str): Required. The queue name. For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -889,13 +889,13 @@ def resume_queue( name (str): Required. The queue name. 
For example: ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Queue` instance. @@ -972,13 +972,13 @@ def get_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Policy` instance. 
@@ -1065,13 +1065,13 @@ def set_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.tasks_v2beta3.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Policy` instance. @@ -1149,13 +1149,13 @@ def test_iam_permissions( information see `IAM Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.TestIamPermissionsResponse` instance. @@ -1258,13 +1258,13 @@ def list_tasks( streaming is performed per-page, this determines the maximum number of resources in a page. 
retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.api_core.page_iterator.PageIterator` instance. @@ -1356,13 +1356,13 @@ def get_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. @@ -1481,13 +1481,13 @@ def create_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. 
If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. @@ -1557,13 +1557,13 @@ def delete_task( name (str): Required. The task name. For example: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1656,13 +1656,13 @@ def run_task( `Google IAM `___ permission on the ``Task`` resource. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is + for the client library request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. + that is provided to the client library method. Returns: A :class:`~google.cloud.tasks_v2beta3.types.Task` instance. diff --git a/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks.proto b/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks.proto index 431d5a73fc7a..711c7a616674 100644 --- a/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks.proto +++ b/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks.proto @@ -311,7 +311,7 @@ message ListQueuesRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -388,7 +388,7 @@ message CreateQueueRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "locations.googleapis.com/Location" + child_type: "cloudtasks.googleapis.com/Queue" } ]; @@ -470,7 +470,7 @@ message ListTasksRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; @@ -560,7 +560,7 @@ message CreateTaskRequest { string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - child_type: "cloudtasks.googleapis.com/Queue" + child_type: "cloudtasks.googleapis.com/Task" } ]; diff --git a/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py b/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py index 45decbad7bfb..051f5a6ef8c9 100644 --- a/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py +++ b/tasks/google/cloud/tasks_v2beta3/proto/cloudtasks_pb2.py @@ -39,7 +39,7 @@ 
"\n\036com.google.cloud.tasks.v2beta3B\017CloudTasksProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks\242\002\005TASKS" ), serialized_pb=_b( - '\n1google/cloud/tasks_v2beta3/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta3\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta3/proto/queue.proto\x1a+google/cloud/tasks_v2beta3/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x85\x01\n\x11ListQueuesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta3.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x86\x01\n\x12\x43reateQueueRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!locations.googleapis.com/Location\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 
\x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xb0\x01\n\x10ListTasksRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta3.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"\xbf\x01\n\x11\x43reateTaskRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta3.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 
\x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View2\xa5\x16\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta3.ListQueuesRequest\x1a..google.cloud.tasks.v2beta3.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta3.GetQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta3.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta3/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta3.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta3/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta3.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta3.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta3.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta3.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta3/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:
getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta3/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta3.ListTasksRequest\x1a-.google.cloud.tasks.v2beta3.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta3.GetTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta3.CreateTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta3.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta3.RunTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"M\x82\xd3\xe4\x93\x02@";/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta3B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks\xa2\x02\x05TASKSb\x06proto3' + 
'\n1google/cloud/tasks_v2beta3/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta3\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/tasks_v2beta3/proto/queue.proto\x1a+google/cloud/tasks_v2beta3/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\x83\x01\n\x11ListQueuesRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta3.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x0fGetQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\x84\x01\n\x12\x43reateQueueRequest\x12\x37\n\x06parent\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\x12\x1f\x63loudtasks.googleapis.com/Queue\x12\x35\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02"|\n\x12UpdateQueueRequest\x12\x35\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta3.QueueB\x03\xe0\x41\x02\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x12\x44\x65leteQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PurgeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"J\n\x11PauseQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"K\n\x12ResumeQueueRequest\x12\x35\n\x04name\x18\x01 \x01(\tB\'\xe0\x41\x02\xfa\x41!\n\x1f\x63loudtasks.googleapis.com/Queue"\xaf\x01\n\x10ListTasksRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 
\x12\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta3.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x84\x01\n\x0eGetTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"\xbe\x01\n\x11\x43reateTaskRequest\x12\x36\n\x06parent\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \x12\x1e\x63loudtasks.googleapis.com/Task\x12\x33\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta3.TaskB\x03\xe0\x41\x02\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View"I\n\x11\x44\x65leteTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task"\x84\x01\n\x0eRunTaskRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1e\x63loudtasks.googleapis.com/Task\x12<\n\rresponse_view\x18\x02 
\x01(\x0e\x32%.google.cloud.tasks.v2beta3.Task.View2\xa5\x16\n\nCloudTasks\x12\xad\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta3.ListQueuesRequest\x1a..google.cloud.tasks.v2beta3.ListQueuesResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{parent=projects/*/locations/*}/queues\xda\x41\x06parent\x12\x9a\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta3.GetQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue">\x82\xd3\xe4\x93\x02\x31\x12//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xaf\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta3.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"M\x82\xd3\xe4\x93\x02\x38"//v2beta3/{parent=projects/*/locations/*}/queues:\x05queue\xda\x41\x0cparent,queue\x12\xba\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta3.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"X\x82\xd3\xe4\x93\x02>25/v2beta3/{queue.name=projects/*/locations/*/queues/*}:\x05queue\xda\x41\x11queue,update_mask\x12\x95\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta3.DeleteQueueRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v2beta3/{name=projects/*/locations/*/queues/*}\xda\x41\x04name\x12\xa7\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta3.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:purge:\x01*\xda\x41\x04name\x12\xa7\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta3.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"G\x82\xd3\xe4\x93\x02:"5/v2beta3/{name=projects/*/locations/*/queues/*}:pause:\x01*\xda\x41\x04name\x12\xaa\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta3.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta3.Queue"H\x82\xd3\xe4\x93\x02;"6/v2beta3/{name=projects/*/locations/*/queues/*}:resume:\x01*\xda\x41\x04name\x12\xa1\x01\n\x0cGetIamPolicy\x12".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"V\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:
getIamPolicy:\x01*\xda\x41\x08resource\x12\xa8\x01\n\x0cSetIamPolicy\x12".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy"]\x82\xd3\xe4\x93\x02\x45"@/v2beta3/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\xda\x41\x0fresource,policy\x12\xd3\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse"h\x82\xd3\xe4\x93\x02K"F/v2beta3/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\xda\x41\x14resource,permissions\x12\xb2\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta3.ListTasksRequest\x1a-.google.cloud.tasks.v2beta3.ListTasksResponse"H\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks\xda\x41\x06parent\x12\x9f\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta3.GetTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"F\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xaf\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta3.CreateTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"P\x82\xd3\xe4\x93\x02<"7/v2beta3/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\xda\x41\x0bparent,task\x12\x9b\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta3.DeleteTaskRequest\x1a\x16.google.protobuf.Empty"F\x82\xd3\xe4\x93\x02\x39*7/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}\xda\x41\x04name\x12\xa6\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta3.RunTaskRequest\x1a .google.cloud.tasks.v2beta3.Task"M\x82\xd3\xe4\x93\x02@";/v2beta3/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*\xda\x41\x04name\x1aM\xca\x41\x19\x63loudtasks.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB|\n\x1e\x63om.google.cloud.tasks.v2beta3B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta3;tasks\xa2\x02\x05TASKSb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -79,7 +79,7 @@ is_extension=False, extension_scope=None, 
serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -147,7 +147,7 @@ extension_ranges=[], oneofs=[], serialized_start=411, - serialized_end=544, + serialized_end=542, ) @@ -203,8 +203,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=546, - serialized_end=642, + serialized_start=544, + serialized_end=640, ) @@ -244,8 +244,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=644, - serialized_end=716, + serialized_start=642, + serialized_end=714, ) @@ -272,7 +272,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A#\022!locations.googleapis.com/Location" + "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" ), file=DESCRIPTOR, ), @@ -303,8 +303,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=719, - serialized_end=853, + serialized_start=717, + serialized_end=849, ) @@ -360,8 +360,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=855, - serialized_end=979, + serialized_start=851, + serialized_end=975, ) @@ -401,8 +401,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=981, - serialized_end=1056, + serialized_start=977, + serialized_end=1052, ) @@ -442,8 +442,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1058, - serialized_end=1132, + serialized_start=1054, + serialized_end=1128, ) @@ -483,8 +483,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1134, - serialized_end=1208, + serialized_start=1130, + serialized_end=1204, ) @@ -524,8 +524,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1210, - serialized_end=1285, + serialized_start=1206, + serialized_end=1281, ) @@ -552,7 +552,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A 
\022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -619,8 +619,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1288, - serialized_end=1464, + serialized_start=1284, + serialized_end=1459, ) @@ -676,8 +676,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1466, - serialized_end=1559, + serialized_start=1461, + serialized_end=1554, ) @@ -735,8 +735,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1562, - serialized_end=1694, + serialized_start=1557, + serialized_end=1689, ) @@ -763,7 +763,7 @@ is_extension=False, extension_scope=None, serialized_options=_b( - "\340A\002\372A!\022\037cloudtasks.googleapis.com/Queue" + "\340A\002\372A \022\036cloudtasks.googleapis.com/Task" ), file=DESCRIPTOR, ), @@ -812,8 +812,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1697, - serialized_end=1888, + serialized_start=1692, + serialized_end=1882, ) @@ -853,8 +853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1890, - serialized_end=1963, + serialized_start=1884, + serialized_end=1957, ) @@ -912,8 +912,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1966, - serialized_end=2098, + serialized_start=1960, + serialized_end=2092, ) _LISTQUEUESRESPONSE.fields_by_name[ @@ -1442,8 +1442,8 @@ serialized_options=_b( "\312A\031cloudtasks.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=2101, - serialized_end=4954, + serialized_start=2095, + serialized_end=4948, methods=[ _descriptor.MethodDescriptor( name="ListQueues", diff --git a/tasks/synth.metadata b/tasks/synth.metadata index cd7da5e5244f..ef9b977ad277 100644 --- a/tasks/synth.metadata +++ b/tasks/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-09-13T20:06:15.433828Z", + "updateTime": "2019-11-02T12:36:27.240345Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.2", - "dockerImage": 
"googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" + "version": "0.41.0", + "dockerImage": "googleapis/artman@sha256:75b38a3b073a7b243545f2332463096624c802bb1e56b8cb6f22ba1ecd325fa9" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a602911ffd765e317860a74bccabfaae684ecf78", - "internalRef": "268935270" + "sha": "aac770126e2def40dcc387f50e8007b21c869e58", + "internalRef": "278016738" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/tasks/synth.py b/tasks/synth.py index 037fb4fd4a3c..4307a63bbfa1 100644 --- a/tasks/synth.py +++ b/tasks/synth.py @@ -88,6 +88,30 @@ "types\.View", "enums.Task.View") +# Change wording of optional params to disambiguate +# client library request methods from Cloud Task requests +s.replace("google/cloud/**/*.py", +""" retry \(Optional\[google\.api_core\.retry\.Retry\]\): A retry object used + to retry requests\. If ``None`` is specified, requests will + be retried using a default configuration\. + timeout \(Optional\[float\]\): The amount of time, in seconds, to wait + for the request to complete\. Note that if ``retry`` is + specified, the timeout applies to each individual attempt\. + metadata \(Optional\[Sequence\[Tuple\[str, str\]\]\]\): Additional metadata + that is provided to the method\. + +""", +""" retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry client library requests. If ``None`` is specified, + requests will be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the client library request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the client library method. 
+ +""") + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- diff --git a/texttospeech/tests/system/v1/test_system_tts_v1.py b/texttospeech/tests/system/v1/test_system_tts_v1.py new file mode 100644 index 000000000000..4d90bae50427 --- /dev/null +++ b/texttospeech/tests/system/v1/test_system_tts_v1.py @@ -0,0 +1,38 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.cloud import texttospeech_v1 + + +class TestSystemSpeech(object): + def test_synthesize_speech(self): + client = texttospeech_v1.TextToSpeechClient() + + synthesis_input = texttospeech_v1.types.SynthesisInput(text="Hello, World!") + voice = texttospeech_v1.types.VoiceSelectionParams( + language_code="en-US", + ssml_gender=texttospeech_v1.enums.SsmlVoiceGender.NEUTRAL, + ) + audio_config = texttospeech_v1.types.AudioConfig( + audio_encoding=texttospeech_v1.enums.AudioEncoding.MP3 + ) + + response = client.synthesize_speech(synthesis_input, voice, audio_config) + assert response.audio_content is not None + + def test_list_voices(self): + client = texttospeech_v1.TextToSpeechClient() + + voices = client.list_voices() + assert len(voices.voices) > 0 diff --git a/texttospeech/tests/system/v1beta1/test_system_tts_v1beta1.py b/texttospeech/tests/system/v1beta1/test_system_tts_v1beta1.py new file mode 100644 index 000000000000..c6058acf27a0 --- /dev/null +++ b/texttospeech/tests/system/v1beta1/test_system_tts_v1beta1.py @@ -0,0 +1,40 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud import texttospeech_v1beta1 + + +class TestSystemSpeech(object): + def test_synthesize_speech(self): + client = texttospeech_v1beta1.TextToSpeechClient() + + synthesis_input = texttospeech_v1beta1.types.SynthesisInput( + text="Hello, World!" 
+ ) + voice = texttospeech_v1beta1.types.VoiceSelectionParams( + language_code="en-US", + ssml_gender=texttospeech_v1beta1.enums.SsmlVoiceGender.NEUTRAL, + ) + audio_config = texttospeech_v1beta1.types.AudioConfig( + audio_encoding=texttospeech_v1beta1.enums.AudioEncoding.MP3 + ) + + response = client.synthesize_speech(synthesis_input, voice, audio_config) + assert response.audio_content is not None + + def test_list_voices(self): + client = texttospeech_v1beta1.TextToSpeechClient() + + voices = client.list_voices() + assert len(voices.voices) > 0 diff --git a/trace/CHANGELOG.md b/trace/CHANGELOG.md index 4c1344b4b03c..adb6e946901c 100644 --- a/trace/CHANGELOG.md +++ b/trace/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-trace/#history +## 0.23.0 + +10-15-2019 06:59 PDT + + +### Dependencies +- Pin 'google-cloud-core >= 1.0.3, < 2.0.0dev'. ([#9445](https://github.com/googleapis/google-cloud-python/pull/9445)) + +### Documentation +- Change requests intersphinx url (via synth). ([#9410](https://github.com/googleapis/google-cloud-python/pull/9410)) +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for `api_core` refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + ## 0.22.1 08-12-2019 13:51 PDT diff --git a/trace/docs/conf.py b/trace/docs/conf.py index b22fc3ab63e0..38a225821c83 100644 --- a/trace/docs/conf.py +++ b/trace/docs/conf.py @@ -264,7 +264,7 @@ u"google-cloud-trace Documentation", author, "manual", - ) + ), ] # The name of an image file (relative to this directory) to place at the top of @@ -314,7 +314,7 @@ "google-cloud-trace", "GAPIC library for the {metadata.shortName} v2 service", "APIs", - ) + ), ] # Documents to append as an appendix to all manuals. 
@@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/trace/google/cloud/trace/_gapic.py b/trace/google/cloud/trace/_gapic.py index 435d9fcc6c17..00ac94d336d4 100644 --- a/trace/google/cloud/trace/_gapic.py +++ b/trace/google/cloud/trace/_gapic.py @@ -314,6 +314,8 @@ def make_trace_api(client): proper configurations. """ generated = trace_service_client.TraceServiceClient( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) return _TraceAPI(generated, client) diff --git a/trace/google/cloud/trace/client.py b/trace/google/cloud/trace/client.py index c4c9d5dd6840..90ddd7134c85 100644 --- a/trace/google/cloud/trace/client.py +++ b/trace/google/cloud/trace/client.py @@ -39,6 +39,9 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. 
""" SCOPE = ( @@ -49,9 +52,16 @@ class Client(ClientWithProject): _trace_api = None - def __init__(self, project=None, credentials=None, client_info=_CLIENT_INFO): + def __init__( + self, + project=None, + credentials=None, + client_info=_CLIENT_INFO, + client_options=None, + ): super(Client, self).__init__(project=project, credentials=credentials) self._client_info = client_info + self._client_options = client_options @property def trace_api(self): diff --git a/trace/google/cloud/trace/v1/_gapic.py b/trace/google/cloud/trace/v1/_gapic.py index 365f535eba5c..a5f5b2f7f0ab 100644 --- a/trace/google/cloud/trace/v1/_gapic.py +++ b/trace/google/cloud/trace/v1/_gapic.py @@ -178,7 +178,9 @@ def make_trace_api(client): proper configurations. """ generated = trace_service_client.TraceServiceClient( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) return _TraceAPI(generated, client) diff --git a/trace/google/cloud/trace/v1/client.py b/trace/google/cloud/trace/v1/client.py index 266158933294..2b79773078a4 100644 --- a/trace/google/cloud/trace/v1/client.py +++ b/trace/google/cloud/trace/v1/client.py @@ -41,6 +41,9 @@ class Client(ClientWithProject): requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own library or partner tool. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. 
""" SCOPE = ( @@ -51,9 +54,16 @@ class Client(ClientWithProject): _trace_api = None - def __init__(self, project=None, credentials=None, client_info=_CLIENT_INFO): + def __init__( + self, + project=None, + credentials=None, + client_info=_CLIENT_INFO, + client_options=None, + ): super(Client, self).__init__(project=project, credentials=credentials) self._client_info = client_info + self._client_options = client_options @property def trace_api(self): diff --git a/trace/google/cloud/trace_v1/__init__.py b/trace/google/cloud/trace_v1/__init__.py index ce7863f3616b..5af055f3da5f 100644 --- a/trace/google/cloud/trace_v1/__init__.py +++ b/trace/google/cloud/trace_v1/__init__.py @@ -27,4 +27,8 @@ class TraceServiceClient(trace_service_client.TraceServiceClient): enums = enums -__all__ = ("enums", "types", "TraceServiceClient") +__all__ = ( + "enums", + "types", + "TraceServiceClient", +) diff --git a/trace/google/cloud/trace_v1/gapic/trace_service_client.py b/trace/google/cloud/trace_v1/gapic/trace_service_client.py index 0e27928d6899..394965726520 100644 --- a/trace/google/cloud/trace_v1/gapic/trace_service_client.py +++ b/trace/google/cloud/trace_v1/gapic/trace_service_client.py @@ -39,7 +39,7 @@ from google.protobuf import timestamp_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-trace").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-trace",).version class TraceServiceClient(object): @@ -165,12 +165,12 @@ def __init__( self.transport = transport else: self.transport = trace_service_grpc_transport.TraceServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -181,7 +181,7 @@ def __init__( # 
(Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -252,7 +252,7 @@ def patch_traces( client_info=self._client_info, ) - request = trace_pb2.PatchTracesRequest(project_id=project_id, traces=traces) + request = trace_pb2.PatchTracesRequest(project_id=project_id, traces=traces,) if metadata is None: metadata = [] metadata = list(metadata) @@ -327,7 +327,7 @@ def get_trace( client_info=self._client_info, ) - request = trace_pb2.GetTraceRequest(project_id=project_id, trace_id=trace_id) + request = trace_pb2.GetTraceRequest(project_id=project_id, trace_id=trace_id,) return self._inner_api_calls["get_trace"]( request, retry=retry, timeout=timeout, metadata=metadata ) diff --git a/trace/google/cloud/trace_v1/gapic/transports/trace_service_grpc_transport.py b/trace/google/cloud/trace_v1/gapic/transports/trace_service_grpc_transport.py index ee3e1908808b..caab89f084d2 100644 --- a/trace/google/cloud/trace_v1/gapic/transports/trace_service_grpc_transport.py +++ b/trace/google/cloud/trace_v1/gapic/transports/trace_service_grpc_transport.py @@ -57,7 +57,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -75,7 +75,9 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. 
- self._stubs = {"trace_service_stub": trace_pb2_grpc.TraceServiceStub(channel)} + self._stubs = { + "trace_service_stub": trace_pb2_grpc.TraceServiceStub(channel), + } @classmethod def create_channel( diff --git a/trace/google/cloud/trace_v1/proto/trace_pb2.py b/trace/google/cloud/trace_v1/proto/trace_pb2.py index 25ac2a5cf6d9..b38ce92bc436 100644 --- a/trace/google/cloud/trace_v1/proto/trace_pb2.py +++ b/trace/google/cloud/trace_v1/proto/trace_pb2.py @@ -195,7 +195,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -401,8 +401,8 @@ ), ], extensions=[], - nested_types=[_TRACESPAN_LABELSENTRY], - enum_types=[_TRACESPAN_SPANKIND], + nested_types=[_TRACESPAN_LABELSENTRY,], + enum_types=[_TRACESPAN_SPANKIND,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -567,7 +567,7 @@ ], extensions=[], nested_types=[], - enum_types=[_LISTTRACESREQUEST_VIEWTYPE], + enum_types=[_LISTTRACESREQUEST_VIEWTYPE,], serialized_options=None, is_extendable=False, syntax="proto3", diff --git a/trace/google/cloud/trace_v1/types.py b/trace/google/cloud/trace_v1/types.py index 058320e083da..6fa160d2e012 100644 --- a/trace/google/cloud/trace_v1/types.py +++ b/trace/google/cloud/trace_v1/types.py @@ -25,9 +25,14 @@ from google.protobuf import timestamp_pb2 -_shared_modules = [empty_pb2, timestamp_pb2] - -_local_modules = [trace_pb2] +_shared_modules = [ + empty_pb2, + timestamp_pb2, +] + +_local_modules = [ + trace_pb2, +] names = [] diff --git a/trace/google/cloud/trace_v2/__init__.py b/trace/google/cloud/trace_v2/__init__.py index c285c7cc0500..0678603445f7 100644 --- a/trace/google/cloud/trace_v2/__init__.py +++ b/trace/google/cloud/trace_v2/__init__.py @@ -27,4 +27,8 @@ class TraceServiceClient(trace_service_client.TraceServiceClient): enums = enums -__all__ = ("enums", "types", "TraceServiceClient") +__all__ = ( + "enums", + "types", + "TraceServiceClient", +) diff --git 
a/trace/google/cloud/trace_v2/gapic/trace_service_client.py b/trace/google/cloud/trace_v2/gapic/trace_service_client.py index ef489dc0d785..de2f3c088d2d 100644 --- a/trace/google/cloud/trace_v2/gapic/trace_service_client.py +++ b/trace/google/cloud/trace_v2/gapic/trace_service_client.py @@ -41,7 +41,7 @@ from google.rpc import status_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-trace").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-trace",).version class TraceServiceClient(object): @@ -84,7 +84,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) @classmethod @@ -184,12 +184,12 @@ def __init__( self.transport = transport else: self.transport = trace_service_grpc_transport.TraceServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -200,7 +200,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -269,7 +269,7 @@ def batch_write_spans( client_info=self._client_info, ) - request = tracing_pb2.BatchWriteSpansRequest(name=name, spans=spans) + request = tracing_pb2.BatchWriteSpansRequest(name=name, spans=spans,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/trace/google/cloud/trace_v2/gapic/transports/trace_service_grpc_transport.py b/trace/google/cloud/trace_v2/gapic/transports/trace_service_grpc_transport.py index 62e908229d96..efc84d0e6418 100644 --- a/trace/google/cloud/trace_v2/gapic/transports/trace_service_grpc_transport.py +++ b/trace/google/cloud/trace_v2/gapic/transports/trace_service_grpc_transport.py @@ -56,7 +56,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -74,7 +74,9 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. 
- self._stubs = {"trace_service_stub": tracing_pb2_grpc.TraceServiceStub(channel)} + self._stubs = { + "trace_service_stub": tracing_pb2_grpc.TraceServiceStub(channel), + } @classmethod def create_channel( diff --git a/trace/google/cloud/trace_v2/proto/trace_pb2.py b/trace/google/cloud/trace_v2/proto/trace_pb2.py index 5298cb85dfe1..22825ebdb550 100644 --- a/trace/google/cloud/trace_v2/proto/trace_pb2.py +++ b/trace/google/cloud/trace_v2/proto/trace_pb2.py @@ -204,7 +204,7 @@ ), ], extensions=[], - nested_types=[_SPAN_ATTRIBUTES_ATTRIBUTEMAPENTRY], + nested_types=[_SPAN_ATTRIBUTES_ATTRIBUTEMAPENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -353,7 +353,7 @@ ], extensions=[], nested_types=[], - enum_types=[_SPAN_TIMEEVENT_MESSAGEEVENT_TYPE], + enum_types=[_SPAN_TIMEEVENT_MESSAGEEVENT_TYPE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -426,7 +426,7 @@ ), ], extensions=[], - nested_types=[_SPAN_TIMEEVENT_ANNOTATION, _SPAN_TIMEEVENT_MESSAGEEVENT], + nested_types=[_SPAN_TIMEEVENT_ANNOTATION, _SPAN_TIMEEVENT_MESSAGEEVENT,], enum_types=[], serialized_options=None, is_extendable=False, @@ -439,7 +439,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=1086, serialized_end=1693, @@ -601,7 +601,7 @@ ], extensions=[], nested_types=[], - enum_types=[_SPAN_LINK_TYPE], + enum_types=[_SPAN_LINK_TYPE,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -1004,7 +1004,7 @@ index=0, containing_type=None, fields=[], - ) + ), ], serialized_start=2195, serialized_end=2337, @@ -1258,7 +1258,7 @@ ), ], extensions=[], - nested_types=[_STACKTRACE_STACKFRAME, _STACKTRACE_STACKFRAMES], + nested_types=[_STACKTRACE_STACKFRAME, _STACKTRACE_STACKFRAMES,], enum_types=[], serialized_options=None, is_extendable=False, diff --git a/trace/google/cloud/trace_v2/types.py b/trace/google/cloud/trace_v2/types.py index f6c6af03351e..3e7a67042e25 100644 --- a/trace/google/cloud/trace_v2/types.py +++ 
b/trace/google/cloud/trace_v2/types.py @@ -29,9 +29,18 @@ from google.rpc import status_pb2 -_shared_modules = [any_pb2, empty_pb2, timestamp_pb2, wrappers_pb2, status_pb2] - -_local_modules = [trace_pb2, tracing_pb2] +_shared_modules = [ + any_pb2, + empty_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, +] + +_local_modules = [ + trace_pb2, + tracing_pb2, +] names = [] diff --git a/trace/setup.py b/trace/setup.py index d9c84832df9b..015b48f9973f 100644 --- a/trace/setup.py +++ b/trace/setup.py @@ -22,15 +22,15 @@ name = 'google-cloud-trace' description = 'Stackdriver Trace API client library' -version = '0.22.1' +version = '0.23.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 3 - Alpha' dependencies = [ - 'google-api-core[grpc] >= 1.14.0, < 2.0.0dev', - "google-cloud-core >= 1.0.0, < 2.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + "google-cloud-core >= 1.0.3, < 2.0dev", ] extras = { } diff --git a/trace/synth.metadata b/trace/synth.metadata index c3bfe0244394..925e26975512 100644 --- a/trace/synth.metadata +++ b/trace/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-08-06T12:44:36.453837Z", + "updateTime": "2019-10-29T12:42:54.805714Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": 
"2019.10.17" } } ], diff --git a/trace/tests/unit/v1/test__gapic_v1.py b/trace/tests/unit/v1/test__gapic_v1.py index bd21f026d056..bf9103e7abe5 100644 --- a/trace/tests/unit/v1/test__gapic_v1.py +++ b/trace/tests/unit/v1/test__gapic_v1.py @@ -227,7 +227,7 @@ def _call_fut(self, client): def test_it(self): from google.cloud.trace.v1._gapic import _TraceAPI - client = mock.Mock(spec=["_credentials", "_client_info"]) + client = mock.Mock(spec=["_credentials", "_client_info", "_client_options"]) patch_api = mock.patch( "google.cloud.trace.v1._gapic.trace_service_client.TraceServiceClient" @@ -237,7 +237,9 @@ def test_it(self): trace_api = self._call_fut(client) patched.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) self.assertIsInstance(trace_api, _TraceAPI) diff --git a/trace/tests/unit/v1/test_client_v1.py b/trace/tests/unit/v1/test_client_v1.py index d3e1c391592a..9f50e868caad 100644 --- a/trace/tests/unit/v1/test_client_v1.py +++ b/trace/tests/unit/v1/test_client_v1.py @@ -47,11 +47,16 @@ def test_constructor_defaults(self): def test_constructor_explicit(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( - project=self.project, credentials=credentials, client_info=client_info + project=self.project, + credentials=credentials, + client_info=client_info, + client_options=client_options, ) self.assertEqual(client.project, self.project) self.assertIs(client._client_info, client_info) + self.assertIs(client._client_options, client_options) def test_trace_api(self): clients = [] diff --git a/trace/tests/unit/v2/test__gapic_v2.py b/trace/tests/unit/v2/test__gapic_v2.py index e6859b5964ef..fd647240dba7 100644 --- a/trace/tests/unit/v2/test__gapic_v2.py +++ b/trace/tests/unit/v2/test__gapic_v2.py @@ -272,7 +272,7 @@ def _call_fut(self, client): def 
test_it(self): from google.cloud.trace._gapic import _TraceAPI - client = mock.Mock(spec=["_credentials", "_client_info"]) + client = mock.Mock(spec=["_credentials", "_client_info", "_client_options"]) patch_api = mock.patch( "google.cloud.trace._gapic.trace_service_client.TraceServiceClient" @@ -282,7 +282,9 @@ def test_it(self): trace_api = self._call_fut(client) patched.assert_called_once_with( - credentials=client._credentials, client_info=client._client_info + credentials=client._credentials, + client_info=client._client_info, + client_options=client._client_options, ) self.assertIsInstance(trace_api, _TraceAPI) diff --git a/trace/tests/unit/v2/test_client_v2.py b/trace/tests/unit/v2/test_client_v2.py index d5a14aeb6c92..4cd8a2a8705a 100644 --- a/trace/tests/unit/v2/test_client_v2.py +++ b/trace/tests/unit/v2/test_client_v2.py @@ -47,10 +47,15 @@ def test_constructor_defaults(self): def test_constructor_explicit(self): credentials = _make_credentials() client_info = mock.Mock() + client_options = mock.Mock() client = self._make_one( - project=self.project, credentials=credentials, client_info=client_info + project=self.project, + credentials=credentials, + client_info=client_info, + client_options=client_options, ) self.assertEqual(client.project, self.project) + self.assertIs(client._client_options, client_options) def test_trace_api(self): clients = [] diff --git a/translate/CHANGELOG.md b/translate/CHANGELOG.md index 7cdbb9408612..594d539a168c 100644 --- a/translate/CHANGELOG.md +++ b/translate/CHANGELOG.md @@ -4,6 +4,42 @@ [1]: https://pypi.org/project/google-cloud-translate/#history +## 2.0.0 + +10-23-2019 11:13 PDT + +### New Features +- Make v3 the default client. ([#9498](https://github.com/googleapis/google-cloud-python/pull/9498)) + +### Internal / Testing Changes +- Add VPC-SC system tests. 
([#9272](https://github.com/googleapis/google-cloud-python/pull/9272)) + +## 1.7.0 + +10-07-2019 14:57 PDT + +### Implementation Changes +- Update docstrings, client confg (via synth). ([#9411](https://github.com/googleapis/google-cloud-python/pull/9411)) +- Remove send / receive message size limit (via synth). ([#8974](https://github.com/googleapis/google-cloud-python/pull/8974)) + +### New Features +- Add support for V3 of the API. ([#9020](https://github.com/googleapis/google-cloud-python/pull/9020)) +- Make `parent` argument required for all client methods in v3beta1; add `labels` argument (via synth). ([#9354](https://github.com/googleapis/google-cloud-python/pull/9354)) +- Add client options to translate_v2. ([#8737](https://github.com/googleapis/google-cloud-python/pull/8737)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) + +### Documentation +- Fix links to reference documentation. ([#8884](https://github.com/googleapis/google-cloud-python/pull/8884)) +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) + +### Internal / Testing Changes +- Update `ListGlossaries` method annotation (via synth) ([#9385](https://github.com/googleapis/google-cloud-python/pull/9385)) +- Fix intersphinx reference to requests ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Update intersphinx mapping for requests. 
([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 1.6.0 07-09-2019 13:13 PDT diff --git a/translate/README.rst b/translate/README.rst index af398ee4e3a0..1ca13ab74749 100644 --- a/translate/README.rst +++ b/translate/README.rst @@ -81,41 +81,4 @@ Windows pip install virtualenv virtualenv \Scripts\activate - \Scripts\pip.exe install google-cloud-translate - - -Example Usage -~~~~~~~~~~~~~ - -.. code-block:: python - - >>> from google.cloud import translate - >>> client = translate.Client() - >>> client.get_languages() - [ - { - 'language': 'af', - 'name': 'Afrikaans', - }, - ... - ] - >>> client.detect_language(['Me llamo', 'I am']) - [ - { - 'confidence': 0.25830904, - 'input': 'Me llamo', - 'language': 'es', - }, { - 'confidence': 0.17112699, - 'input': 'I am', - 'language': 'en', - }, - ] - >>> from google.cloud import translate - >>> client = translate.Client() - >>> client.translate('koszula') - { - 'translatedText': 'shirt', - 'detectedSourceLanguage': 'pl', - 'input': 'koszula', - } + \Scripts\pip.exe install google-cloud-translate \ No newline at end of file diff --git a/translate/docs/gapic/v3/api.rst b/translate/docs/gapic/v3/api.rst new file mode 100644 index 000000000000..e16ef6a89d51 --- /dev/null +++ b/translate/docs/gapic/v3/api.rst @@ -0,0 +1,6 @@ +Client for Cloud Translation API +================================ + +.. automodule:: google.cloud.translate_v3 + :members: + :inherited-members: \ No newline at end of file diff --git a/translate/docs/gapic/v3/types.rst b/translate/docs/gapic/v3/types.rst new file mode 100644 index 000000000000..fb83ab0657b6 --- /dev/null +++ b/translate/docs/gapic/v3/types.rst @@ -0,0 +1,5 @@ +Types for Cloud Translation API Client +====================================== + +.. 
automodule:: google.cloud.translate_v3.types + :members: \ No newline at end of file diff --git a/translate/docs/index.rst b/translate/docs/index.rst index 8b370cfdae11..b5d2405e2f47 100644 --- a/translate/docs/index.rst +++ b/translate/docs/index.rst @@ -1,31 +1,23 @@ .. include:: README.rst -Detailed Usage Guide --------------------- -.. toctree:: - :maxdepth: 2 - - usage - API Reference ------------- -A new beta release, spelled ``v3beta1``, is provided to provide for preview -of upcoming features. In order to use this, you will want to import from -``google.cloud.translate_v3beta1`` in lieu of ``google.cloud.translate``. +An API and type reference is provided for ``v3``, ``v3beta1``, and ``v2``. -.. toctree:: - :maxdepth: 2 - - gapic/v3beta1/api - gapic/v3beta1/types +By default, you will get ``v3``. A beta release, spelled ``v3beta1`` is +provided for preview of upcoming features. In order to use this, you will +want to import from ``google.cloud.translate_v3beta1`` in lieu of +``google.cloud.translate``. The previous release ``v2`` is also available. +Import from ``google.cloud.translate_v2`` to use this release. -An API and type reference is provided for v2: .. toctree:: - :maxdepth: 2 - - client + :maxdepth: 2 + + v3 + v3beta1 + v2 Changelog diff --git a/translate/docs/usage.rst b/translate/docs/usage.rst index 5fc2767c06e6..c6ee312c0cac 100644 --- a/translate/docs/usage.rst +++ b/translate/docs/usage.rst @@ -1,12 +1,12 @@ -Using the Translate Client +Translation v2 Usage Guide -------------------------- To create a client: .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() By default, the client targets English when doing detections and translations, but a non-default value can be used as @@ -14,21 +14,21 @@ well: .. 
doctest:: - >>> from google.cloud import translate - >>> client = translate.Client(target_language='es') + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client(target_language='es') -The Google Cloud Translation API has three supported methods, and they +The Google Cloud Translation v2 API has three supported methods, and they map to three methods on a client: -:meth:`~google.cloud.translate.client.Client.get_languages`, -:meth:`~google.cloud.translate.client.Client.detect_language` and -:meth:`~google.cloud.translate.client.Client.translate`. +:meth:`~google.cloud.translate_v2.client.Client.get_languages`, +:meth:`~google.cloud.translate_v2.client.Client.detect_language` and +:meth:`~google.cloud.translate_v2.client.Client.translate`. To get a list of languages supported by the Google Cloud Translation API .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.get_languages() [ { @@ -42,8 +42,8 @@ To detect the language that some given text is written in: .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.detect_language(['Me llamo', 'I am']) [ { @@ -68,8 +68,8 @@ the source language: .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.translate('koszula') { 'translatedText': 'shirt', @@ -81,8 +81,8 @@ If the source language is known: .. doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.translate('camisa', source_language='es') { 'translatedText': 'shirt', @@ -93,8 +93,8 @@ or to use a non-default target language: .. 
doctest:: - >>> from google.cloud import translate - >>> client = translate.Client() + >>> from google.cloud import translate_v2 + >>> client = translate_v2.Client() >>> client.translate(['Me llamo Jeff', 'My name is Jeff'], ... target_language='de') [ diff --git a/translate/docs/v2.rst b/translate/docs/v2.rst new file mode 100644 index 000000000000..0e056c8ff63b --- /dev/null +++ b/translate/docs/v2.rst @@ -0,0 +1,8 @@ +v2 +=== + +.. toctree:: + :maxdepth: 2 + + client + usage diff --git a/translate/docs/v3.rst b/translate/docs/v3.rst new file mode 100644 index 000000000000..b6b8cd17e1e6 --- /dev/null +++ b/translate/docs/v3.rst @@ -0,0 +1,8 @@ +v3 +=== + +.. toctree:: + :maxdepth: 2 + + gapic/v3/api + gapic/v3/types diff --git a/translate/docs/v3beta1.rst b/translate/docs/v3beta1.rst new file mode 100644 index 000000000000..368aed30e243 --- /dev/null +++ b/translate/docs/v3beta1.rst @@ -0,0 +1,8 @@ +v3beta1 +======= + +.. toctree:: + :maxdepth: 2 + + gapic/v3beta1/api + gapic/v3beta1/types diff --git a/translate/google/cloud/translate.py b/translate/google/cloud/translate.py index 947f8c8ae093..27d23f137628 100644 --- a/translate/google/cloud/translate.py +++ b/translate/google/cloud/translate.py @@ -1,10 +1,12 @@ -# Copyright 2017 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -12,16 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Google Cloud Translation API wrapper.""" - -from google.cloud.translate_v2 import __version__ -from google.cloud.translate_v2.client import Client +from __future__ import absolute_import -# These constants are essentially deprecated; strings should be used instead. -# They are imported here for backwards compatibility. -from google.cloud.translate_v2.client import BASE -from google.cloud.translate_v2.client import NMT +from google.cloud.translate_v3 import TranslationServiceClient +from google.cloud.translate_v3 import types -__all__ = ("__version__", "BASE", "Client", "NMT") +__all__ = ("types", "TranslationServiceClient") diff --git a/translate/google/cloud/translate_v3/__init__.py b/translate/google/cloud/translate_v3/__init__.py new file mode 100644 index 000000000000..f4e61721160a --- /dev/null +++ b/translate/google/cloud/translate_v3/__init__.py @@ -0,0 +1,28 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +from __future__ import absolute_import + +from google.cloud.translate_v3 import types +from google.cloud.translate_v3.gapic import translation_service_client + + +class TranslationServiceClient(translation_service_client.TranslationServiceClient): + __doc__ = translation_service_client.TranslationServiceClient.__doc__ + + +__all__ = ("types", "TranslationServiceClient") diff --git a/translate/google/cloud/translate_v3/gapic/__init__.py b/translate/google/cloud/translate_v3/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/translate/google/cloud/translate_v3/gapic/enums.py b/translate/google/cloud/translate_v3/gapic/enums.py new file mode 100644 index 000000000000..051328dab5b6 --- /dev/null +++ b/translate/google/cloud/translate_v3/gapic/enums.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class BatchTranslateMetadata(object): + class State(enum.IntEnum): + """ + State of the job. + + Attributes: + STATE_UNSPECIFIED (int): Invalid. + RUNNING (int): Request is being processed. + SUCCEEDED (int): The batch is processed, and at least one item was successfully + processed. + FAILED (int): The batch is done and no item was successfully processed. + CANCELLING (int): Request is in the process of being canceled after caller invoked + longrunning.Operations.CancelOperation on the request id. 
+ CANCELLED (int): The batch is done after the user has called the + longrunning.Operations.CancelOperation. Any records processed before the + cancel command are output as specified in the request. + """ + + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLING = 4 + CANCELLED = 5 + + +class CreateGlossaryMetadata(object): + class State(enum.IntEnum): + """ + Enumerates the possible states that the creation request can be in. + + Attributes: + STATE_UNSPECIFIED (int): Invalid. + RUNNING (int): Request is being processed. + SUCCEEDED (int): The glossary was successfully created. + FAILED (int): Failed to create the glossary. + CANCELLING (int): Request is in the process of being canceled after caller invoked + longrunning.Operations.CancelOperation on the request id. + CANCELLED (int): The glossary creation request was successfully canceled. + """ + + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLING = 4 + CANCELLED = 5 + + +class DeleteGlossaryMetadata(object): + class State(enum.IntEnum): + """ + Enumerates the possible states that the creation request can be in. + + Attributes: + STATE_UNSPECIFIED (int): Invalid. + RUNNING (int): Request is being processed. + SUCCEEDED (int): The glossary was successfully deleted. + FAILED (int): Failed to delete the glossary. + CANCELLING (int): Request is in the process of being canceled after caller invoked + longrunning.Operations.CancelOperation on the request id. + CANCELLED (int): The glossary deletion request was successfully canceled. 
+ """ + + STATE_UNSPECIFIED = 0 + RUNNING = 1 + SUCCEEDED = 2 + FAILED = 3 + CANCELLING = 4 + CANCELLED = 5 diff --git a/translate/google/cloud/translate_v3/gapic/translation_service_client.py b/translate/google/cloud/translate_v3/gapic/translation_service_client.py new file mode 100644 index 000000000000..c993c434b2ae --- /dev/null +++ b/translate/google/cloud/translate_v3/gapic/translation_service_client.py @@ -0,0 +1,1099 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Accesses the google.cloud.translation.v3 TranslationService API.""" + +import functools +import pkg_resources +import warnings + +from google.oauth2 import service_account +import google.api_core.client_options +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.operation +import google.api_core.operations_v1 +import google.api_core.page_iterator +import google.api_core.path_template +import google.api_core.protobuf_helpers +import grpc + +from google.cloud.translate_v3.gapic import translation_service_client_config +from google.cloud.translate_v3.gapic.transports import ( + translation_service_grpc_transport, +) +from google.cloud.translate_v3.proto import translation_service_pb2 +from google.cloud.translate_v3.proto import translation_service_pb2_grpc +from google.longrunning import operations_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + "google-cloud-translate" +).version + + +class TranslationServiceClient(object): + """Provides natural language translation operations.""" + + SERVICE_ADDRESS = "translate.googleapis.com:443" + """The default address of the service.""" + + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = "google.cloud.translation.v3.TranslationService" + + @classmethod + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranslationServiceClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def glossary_path(cls, project, location, glossary): + """Return a fully-qualified glossary string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}/glossaries/{glossary}", + project=project, + location=location, + glossary=glossary, + ) + + @classmethod + def location_path(cls, project, location): + """Return a fully-qualified location string.""" + return google.api_core.path_template.expand( + "projects/{project}/locations/{location}", + project=project, + location=location, + ) + + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + client_options=None, + ): + """Constructor. + + Args: + transport (Union[~.TranslationServiceGrpcTransport, + Callable[[~.Credentials, type], ~.TranslationServiceGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. 
+ client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. + """ + # Raise deprecation warnings for things we want to go away. + if client_config is not None: + warnings.warn( + "The `client_config` argument is deprecated.", + PendingDeprecationWarning, + stacklevel=2, + ) + else: + client_config = translation_service_client_config.config + + if channel: + warnings.warn( + "The `channel` argument is deprecated; use " "`transport` instead.", + PendingDeprecationWarning, + stacklevel=2, + ) + + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=translation_service_grpc_transport.TranslationServiceGrpcTransport, + address=api_endpoint, + ) + else: + if credentials: + raise ValueError( + "Received both a transport instance and " + "credentials; these are mutually exclusive." 
+ ) + self.transport = transport + else: + self.transport = translation_service_grpc_transport.TranslationServiceGrpcTransport( + address=api_endpoint, channel=channel, credentials=credentials + ) + + if client_info is None: + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION + ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config["interfaces"][self._INTERFACE_NAME] + ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} + + # Service calls + def translate_text( + self, + contents, + target_language_code, + parent, + mime_type=None, + source_language_code=None, + model=None, + glossary_config=None, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Translates input text and returns translated text. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> # TODO: Initialize `contents`: + >>> contents = [] + >>> + >>> # TODO: Initialize `target_language_code`: + >>> target_language_code = '' + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.translate_text(contents, target_language_code, parent) + + Args: + contents (list[str]): Required. The content of the input in string format. + We recommend the total content be less than 30k codepoints. + Use BatchTranslateText for larger text. 
+ target_language_code (str): Required. The BCP-47 language code to use for translation of the input + text, set to one of the language codes listed in Language Support. + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. + + Format: ``projects/{project-number-or-id}`` or + ``projects/{project-number-or-id}/locations/{location-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. + + Non-global location is required for requests using AutoML models or + custom glossaries. + + Models and glossaries must be within the same region (have same + location-id), otherwise an INVALID\_ARGUMENT (400) error is returned. + mime_type (str): Optional. The format of the source text, for example, "text/html", + "text/plain". If left blank, the MIME type defaults to "text/html". + source_language_code (str): Optional. The BCP-47 language code of the input text if + known, for example, "en-US" or "sr-Latn". Supported language codes are + listed in Language Support. If the source language isn't specified, the API + attempts to identify the source language automatically and returns the + source language within the response. + model (str): Optional. The ``model`` type requested for this translation. + + The format depends on model type: + + - AutoML Translation models: + ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}`` + + - General (built-in) models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location-id}/models/general/base`` + + For global (non-regionalized) requests, use ``location-id`` ``global``. + For example, + ``projects/{project-number-or-id}/locations/global/models/general/nmt``. + + If missing, the system decides which google base model to use. 
+ glossary_config (Union[dict, ~google.cloud.translate_v3.types.TranslateTextGlossaryConfig]): Optional. Glossary to be applied. The glossary must be within the same + region (have the same location-id) as the model, otherwise an + INVALID\_ARGUMENT (400) error is returned. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.TranslateTextGlossaryConfig` + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types.TranslateTextResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "translate_text" not in self._inner_api_calls: + self._inner_api_calls[ + "translate_text" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.translate_text, + default_retry=self._method_configs["TranslateText"].retry, + default_timeout=self._method_configs["TranslateText"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.TranslateTextRequest( + contents=contents, + target_language_code=target_language_code, + parent=parent, + mime_type=mime_type, + source_language_code=source_language_code, + model=model, + glossary_config=glossary_config, + labels=labels, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["translate_text"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def detect_language( + self, + parent, + model=None, + content=None, + mime_type=None, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Detects the language of text within a request. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.detect_language(parent) + + Args: + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. + + Format: ``projects/{project-number-or-id}/locations/{location-id}`` or + ``projects/{project-number-or-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. + + Only models within the same region (has same location-id) can be used. 
+ Otherwise an INVALID\_ARGUMENT (400) error is returned. + model (str): Optional. The language detection model to be used. + + Format: + ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/{model-id}`` + + Only one language detection model is currently supported: + ``projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default``. + + If not specified, the default model is used. + content (str): The content of the input stored as a string. + mime_type (str): Optional. The format of the source text, for example, "text/html", + "text/plain". If left blank, the MIME type defaults to "text/html". + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types.DetectLanguageResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "detect_language" not in self._inner_api_calls: + self._inner_api_calls[ + "detect_language" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.detect_language, + default_retry=self._method_configs["DetectLanguage"].retry, + default_timeout=self._method_configs["DetectLanguage"].timeout, + client_info=self._client_info, + ) + + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof(content=content) + + request = translation_service_pb2.DetectLanguageRequest( + parent=parent, + model=model, + content=content, + mime_type=mime_type, + labels=labels, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["detect_language"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_supported_languages( + self, + parent, + display_language_code=None, + model=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Returns a list of supported languages for translation. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.get_supported_languages(parent) + + Args: + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. + + Format: ``projects/{project-number-or-id}`` or + ``projects/{project-number-or-id}/locations/{location-id}``. + + For global calls, use + ``projects/{project-number-or-id}/locations/global`` or + ``projects/{project-number-or-id}``. 
+ + Non-global location is required for AutoML models. + + Only models within the same region (have same location-id) can be used, + otherwise an INVALID\_ARGUMENT (400) error is returned. + display_language_code (str): Optional. The language to use to return localized, human readable names + of supported languages. If missing, then display names are not returned + in a response. + model (str): Optional. Get supported languages of this model. + + The format depends on model type: + + - AutoML Translation models: + ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}`` + + - General (built-in) models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location-id}/models/general/base`` + + Returns languages supported by the specified model. If missing, we get + supported languages of Google general base (PBMT) model. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types.SupportedLanguages` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "get_supported_languages" not in self._inner_api_calls: + self._inner_api_calls[ + "get_supported_languages" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_supported_languages, + default_retry=self._method_configs["GetSupportedLanguages"].retry, + default_timeout=self._method_configs["GetSupportedLanguages"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.GetSupportedLanguagesRequest( + parent=parent, display_language_code=display_language_code, model=model + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_supported_languages"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def batch_translate_text( + self, + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, + models=None, + glossaries=None, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Translates a large volume of text in asynchronous batch mode. + This function provides real-time output as the inputs are being processed. + If caller cancels a request, the partial results (for an input file, it's + all or nothing) may still be available on the specified output location. + + This call returns immediately and you can + use google.longrunning.Operation.name to poll the status of the call. 
+ + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `source_language_code`: + >>> source_language_code = '' + >>> + >>> # TODO: Initialize `target_language_codes`: + >>> target_language_codes = [] + >>> + >>> # TODO: Initialize `input_configs`: + >>> input_configs = [] + >>> + >>> # TODO: Initialize `output_config`: + >>> output_config = {} + >>> + >>> response = client.batch_translate_text(parent, source_language_code, target_language_codes, input_configs, output_config) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): Required. Location to make a call. Must refer to a caller's project. + + Format: ``projects/{project-number-or-id}/locations/{location-id}``. + + The ``global`` location is not supported for batch translation. + + Only AutoML Translation models or glossaries within the same region + (have the same location-id) can be used, otherwise an INVALID\_ARGUMENT + (400) error is returned. + source_language_code (str): Required. Source language code. + target_language_codes (list[str]): Required. Specify up to 10 language codes here. + input_configs (list[Union[dict, ~google.cloud.translate_v3.types.InputConfig]]): Required. Input configurations. + The total number of files matched should be <= 1000. + The total content size should be <= 100M Unicode codepoints. + The files must use UTF-8 encoding. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.InputConfig` + output_config (Union[dict, ~google.cloud.translate_v3.types.OutputConfig]): Required. Output configuration. 
+ If 2 input configs match to the same file (that is, same input path), + we don't generate output for duplicate inputs. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.OutputConfig` + models (dict[str -> str]): Optional. The models to use for translation. Map's key is target + language code. Map's value is model name. Value can be a built-in + general model, or an AutoML Translation model. + + The value format depends on model type: + + - AutoML Translation models: + ``projects/{project-number-or-id}/locations/{location-id}/models/{model-id}`` + + - General (built-in) models: + ``projects/{project-number-or-id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location-id}/models/general/base`` + + If the map is empty or a specific model is not requested for a language + pair, then default google model (nmt) is used. + glossaries (dict[str -> Union[dict, ~google.cloud.translate_v3.types.TranslateTextGlossaryConfig]]): Optional. Glossaries to be applied for translation. + It's keyed by target language code. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.TranslateTextGlossaryConfig` + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "batch_translate_text" not in self._inner_api_calls: + self._inner_api_calls[ + "batch_translate_text" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.batch_translate_text, + default_retry=self._method_configs["BatchTranslateText"].retry, + default_timeout=self._method_configs["BatchTranslateText"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.BatchTranslateTextRequest( + parent=parent, + source_language_code=source_language_code, + target_language_codes=target_language_codes, + input_configs=input_configs, + output_config=output_config, + models=models, + glossaries=glossaries, + labels=labels, + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["batch_translate_text"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + translation_service_pb2.BatchTranslateResponse, + 
metadata_type=translation_service_pb2.BatchTranslateMetadata, + ) + + def create_glossary( + self, + parent, + glossary, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Creates a glossary and returns the long-running operation. Returns + NOT\_FOUND, if the project doesn't exist. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize `glossary`: + >>> glossary = {} + >>> + >>> response = client.create_glossary(parent, glossary) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + parent (str): Required. The project name. + glossary (Union[dict, ~google.cloud.translate_v3.types.Glossary]): Required. The glossary to create. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.translate_v3.types.Glossary` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. 
+ ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "create_glossary" not in self._inner_api_calls: + self._inner_api_calls[ + "create_glossary" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_glossary, + default_retry=self._method_configs["CreateGlossary"].retry, + default_timeout=self._method_configs["CreateGlossary"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.CreateGlossaryRequest( + parent=parent, glossary=glossary + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["create_glossary"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + translation_service_pb2.Glossary, + metadata_type=translation_service_pb2.CreateGlossaryMetadata, + ) + + def list_glossaries( + self, + parent, + page_size=None, + filter_=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists glossaries in a project. Returns NOT\_FOUND, if the project + doesn't exist. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # Iterate over all results + >>> for element in client.list_glossaries(parent): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_glossaries(parent).pages: + ... for element in page: + ... # process element + ... 
pass + + Args: + parent (str): Required. The name of the project from which to list all of the glossaries. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + filter_ (str): Optional. Filter specifying constraints of a list operation. + Filtering is not supported yet, and the parameter currently has no effect. + If missing, no filtering is performed. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.translate_v3.types.Glossary` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "list_glossaries" not in self._inner_api_calls: + self._inner_api_calls[ + "list_glossaries" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_glossaries, + default_retry=self._method_configs["ListGlossaries"].retry, + default_timeout=self._method_configs["ListGlossaries"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.ListGlossariesRequest( + parent=parent, page_size=page_size, filter=filter_ + ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("parent", parent)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_glossaries"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="glossaries", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + + def get_glossary( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets a glossary. Returns NOT\_FOUND, if the glossary doesn't exist. + + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> name = client.glossary_path('[PROJECT]', '[LOCATION]', '[GLOSSARY]') + >>> + >>> response = client.get_glossary(name) + + Args: + name (str): Required. The name of the glossary to retrieve. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types.Glossary` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_glossary" not in self._inner_api_calls: + self._inner_api_calls[ + "get_glossary" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_glossary, + default_retry=self._method_configs["GetGlossary"].retry, + default_timeout=self._method_configs["GetGlossary"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.GetGlossaryRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_glossary"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_glossary( + self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Deletes a glossary, or cancels glossary construction if the glossary + isn't created yet. Returns NOT\_FOUND, if the glossary doesn't exist. 
+ + Example: + >>> from google.cloud import translate_v3 + >>> + >>> client = translate_v3.TranslationServiceClient() + >>> + >>> name = client.glossary_path('[PROJECT]', '[LOCATION]', '[GLOSSARY]') + >>> + >>> response = client.delete_glossary(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. The name of the glossary to delete. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.translate_v3.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "delete_glossary" not in self._inner_api_calls: + self._inner_api_calls[ + "delete_glossary" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_glossary, + default_retry=self._method_configs["DeleteGlossary"].retry, + default_timeout=self._method_configs["DeleteGlossary"].timeout, + client_info=self._client_info, + ) + + request = translation_service_pb2.DeleteGlossaryRequest(name=name) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + operation = self._inner_api_calls["delete_glossary"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + return google.api_core.operation.from_gapic( + operation, + self.transport._operations_client, + translation_service_pb2.DeleteGlossaryResponse, + metadata_type=translation_service_pb2.DeleteGlossaryMetadata, + ) diff --git a/translate/google/cloud/translate_v3/gapic/translation_service_client_config.py b/translate/google/cloud/translate_v3/gapic/translation_service_client_config.py new file mode 100644 index 000000000000..0e611ae41d1a --- /dev/null +++ b/translate/google/cloud/translate_v3/gapic/translation_service_client_config.py @@ -0,0 +1,63 @@ +config = { + "interfaces": { + "google.cloud.translation.v3.TranslationService": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000, + } + }, + "methods": { + "TranslateText": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + 
}, + "DetectLanguage": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "GetSupportedLanguages": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "BatchTranslateText": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "CreateGlossary": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + "ListGlossaries": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "GetGlossary": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default", + }, + "DeleteGlossary": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default", + }, + }, + } + } +} diff --git a/translate/google/cloud/translate_v3/gapic/transports/__init__.py b/translate/google/cloud/translate_v3/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/translate/google/cloud/translate_v3/gapic/transports/translation_service_grpc_transport.py b/translate/google/cloud/translate_v3/gapic/transports/translation_service_grpc_transport.py new file mode 100644 index 000000000000..ed038e376db0 --- /dev/null +++ b/translate/google/cloud/translate_v3/gapic/transports/translation_service_grpc_transport.py @@ -0,0 +1,234 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + + +import google.api_core.grpc_helpers +import google.api_core.operations_v1 + +from google.cloud.translate_v3.proto import translation_service_pb2_grpc + + +class TranslationServiceGrpcTransport(object): + """gRPC transport class providing stubs for + google.cloud.translation.v3 TranslationService API. + + The transport provides access to the raw gRPC stubs, + which can be used to take advantage of advanced + features of gRPC. + """ + + # The scopes needed to make gRPC calls to all of the methods defined + # in this service. + _OAUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-translation", + ) + + def __init__( + self, channel=None, credentials=None, address="translate.googleapis.com:443" + ): + """Instantiate the transport class. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + address (str): The address where the service is hosted. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) + + # Create the channel. 
+ if channel is None: + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + + self._channel = channel + + # gRPC uses objects called "stubs" that are bound to the + # channel and provide a basic method for each RPC. + self._stubs = { + "translation_service_stub": translation_service_pb2_grpc.TranslationServiceStub( + channel + ) + } + + # Because this API includes a method that returns a + # long-running operation (proto: google.longrunning.Operation), + # instantiate an LRO client. + self._operations_client = google.api_core.operations_v1.OperationsClient( + channel + ) + + @classmethod + def create_channel( + cls, address="translate.googleapis.com:443", credentials=None, **kwargs + ): + """Create and return a gRPC channel object. + + Args: + address (str): The host for the channel to use. + credentials (~.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return google.api_core.grpc_helpers.create_channel( + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs + ) + + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + + @property + def translate_text(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.translate_text`. + + Translates input text and returns translated text. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["translation_service_stub"].TranslateText + + @property + def detect_language(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.detect_language`. + + Detects the language of text within a request. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].DetectLanguage + + @property + def get_supported_languages(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.get_supported_languages`. + + Returns a list of supported languages for translation. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].GetSupportedLanguages + + @property + def batch_translate_text(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.batch_translate_text`. + + Translates a large volume of text in asynchronous batch mode. + This function provides real-time output as the inputs are being processed. + If caller cancels a request, the partial results (for an input file, it's + all or nothing) may still be available on the specified output location. + + This call returns immediately and you can + use google.longrunning.Operation.name to poll the status of the call. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].BatchTranslateText + + @property + def create_glossary(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.create_glossary`. + + Creates a glossary and returns the long-running operation. Returns + NOT\_FOUND, if the project doesn't exist. 
+ + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].CreateGlossary + + @property + def list_glossaries(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.list_glossaries`. + + Lists glossaries in a project. Returns NOT\_FOUND, if the project + doesn't exist. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].ListGlossaries + + @property + def get_glossary(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.get_glossary`. + + Gets a glossary. Returns NOT\_FOUND, if the glossary doesn't exist. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["translation_service_stub"].GetGlossary + + @property + def delete_glossary(self): + """Return the gRPC stub for :meth:`TranslationServiceClient.delete_glossary`. + + Deletes a glossary, or cancels glossary construction if the glossary + isn't created yet. Returns NOT\_FOUND, if the glossary doesn't exist. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["translation_service_stub"].DeleteGlossary diff --git a/translate/google/cloud/translate_v3/proto/__init__.py b/translate/google/cloud/translate_v3/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/translate/google/cloud/translate_v3/proto/translation_service.proto b/translate/google/cloud/translate_v3/proto/translation_service.proto new file mode 100644 index 000000000000..ad43831c29b0 --- /dev/null +++ b/translate/google/cloud/translate_v3/proto/translation_service.proto @@ -0,0 +1,926 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.cloud.translation.v3; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/timestamp.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.Translate.V3"; +option go_package = "google.golang.org/genproto/googleapis/cloud/translate/v3;translate"; +option java_multiple_files = true; +option java_outer_classname = "TranslationServiceProto"; +option java_package = "com.google.cloud.translate.v3"; +option php_namespace = "Google\\Cloud\\Translate\\V3"; +option ruby_package = "Google::Cloud::Translate::V3"; + +// Proto file for the Cloud Translation API (v3 GA). 
+ +// Provides natural language translation operations. +service TranslationService { + option (google.api.default_host) = "translate.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-translation"; + + // Translates input text and returns translated text. + rpc TranslateText(TranslateTextRequest) returns (TranslateTextResponse) { + option (google.api.http) = { + post: "/v3/{parent=projects/*/locations/*}:translateText" + body: "*" + additional_bindings { + post: "/v3/{parent=projects/*}:translateText" + body: "*" + } + }; + option (google.api.method_signature) = + "parent,target_language_code,contents"; + option (google.api.method_signature) = + "parent,model,mime_type,source_language_code,target_language_code,contents"; + } + + // Detects the language of text within a request. + rpc DetectLanguage(DetectLanguageRequest) returns (DetectLanguageResponse) { + option (google.api.http) = { + post: "/v3/{parent=projects/*/locations/*}:detectLanguage" + body: "*" + additional_bindings { + post: "/v3/{parent=projects/*}:detectLanguage" + body: "*" + } + }; + option (google.api.method_signature) = "parent,model,mime_type,content"; + } + + // Returns a list of supported languages for translation. + rpc GetSupportedLanguages(GetSupportedLanguagesRequest) + returns (SupportedLanguages) { + option (google.api.http) = { + get: "/v3/{parent=projects/*/locations/*}/supportedLanguages" + additional_bindings { get: "/v3/{parent=projects/*}/supportedLanguages" } + }; + option (google.api.method_signature) = "parent,model,display_language_code"; + } + + // Translates a large volume of text in asynchronous batch mode. + // This function provides real-time output as the inputs are being processed. + // If caller cancels a request, the partial results (for an input file, it's + // all or nothing) may still be available on the specified output location. 
+ // + // This call returns immediately and you can + // use google.longrunning.Operation.name to poll the status of the call. + rpc BatchTranslateText(BatchTranslateTextRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v3/{parent=projects/*/locations/*}:batchTranslateText" + body: "*" + }; + option (google.longrunning.operation_info) = { + response_type: "BatchTranslateResponse" + metadata_type: "BatchTranslateMetadata" + }; + } + + // Creates a glossary and returns the long-running operation. Returns + // NOT_FOUND, if the project doesn't exist. + rpc CreateGlossary(CreateGlossaryRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v3/{parent=projects/*/locations/*}/glossaries" + body: "glossary" + }; + option (google.api.method_signature) = "parent,glossary"; + option (google.longrunning.operation_info) = { + response_type: "Glossary" + metadata_type: "CreateGlossaryMetadata" + }; + } + + // Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't + // exist. + rpc ListGlossaries(ListGlossariesRequest) returns (ListGlossariesResponse) { + option (google.api.http) = { + get: "/v3/{parent=projects/*/locations/*}/glossaries" + }; + option (google.api.method_signature) = "parent"; + } + + // Gets a glossary. Returns NOT_FOUND, if the glossary doesn't + // exist. + rpc GetGlossary(GetGlossaryRequest) returns (Glossary) { + option (google.api.http) = { + get: "/v3/{name=projects/*/locations/*/glossaries/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Deletes a glossary, or cancels glossary construction + // if the glossary isn't created yet. + // Returns NOT_FOUND, if the glossary doesn't exist. 
+ rpc DeleteGlossary(DeleteGlossaryRequest) + returns (google.longrunning.Operation) { + option (google.api.http) = { + delete: "/v3/{name=projects/*/locations/*/glossaries/*}" + }; + option (google.api.method_signature) = "name"; + option (google.longrunning.operation_info) = { + response_type: "DeleteGlossaryResponse" + metadata_type: "DeleteGlossaryMetadata" + }; + } +} + +// Configures which glossary should be used for a specific target language, +// and defines options for applying that glossary. +message TranslateTextGlossaryConfig { + // Required. Specifies the glossary used for this translation. Use + // this format: projects/*/locations/*/glossaries/* + string glossary = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Indicates match is case-insensitive. + // Default value is false if missing. + bool ignore_case = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// The request message for synchronous translation. +message TranslateTextRequest { + // Required. The content of the input in string format. + // We recommend the total content be less than 30k codepoints. + // Use BatchTranslateText for larger text. + repeated string contents = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The format of the source text, for example, "text/html", + // "text/plain". If left blank, the MIME type defaults to "text/html". + string mime_type = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The BCP-47 language code of the input text if + // known, for example, "en-US" or "sr-Latn". Supported language codes are + // listed in Language Support. If the source language isn't specified, the API + // attempts to identify the source language automatically and returns the + // source language within the response. + string source_language_code = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The BCP-47 language code to use for translation of the input + // text, set to one of the language codes listed in Language Support. 
+ string target_language_code = 5 [(google.api.field_behavior) = REQUIRED]; + + // Required. Project or location to make a call. Must refer to a caller's + // project. + // + // Format: `projects/{project-number-or-id}` or + // `projects/{project-number-or-id}/locations/{location-id}`. + // + // For global calls, use `projects/{project-number-or-id}/locations/global` or + // `projects/{project-number-or-id}`. + // + // Non-global location is required for requests using AutoML models or + // custom glossaries. + // + // Models and glossaries must be within the same region (have same + // location-id), otherwise an INVALID_ARGUMENT (400) error is returned. + string parent = 8 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Optional. The `model` type requested for this translation. + // + // The format depends on model type: + // + // - AutoML Translation models: + // `projects/{project-number-or-id}/locations/{location-id}/models/{model-id}` + // + // - General (built-in) models: + // `projects/{project-number-or-id}/locations/{location-id}/models/general/nmt`, + // `projects/{project-number-or-id}/locations/{location-id}/models/general/base` + // + // + // For global (non-regionalized) requests, use `location-id` `global`. + // For example, + // `projects/{project-number-or-id}/locations/global/models/general/nmt`. + // + // If missing, the system decides which google base model to use. + string model = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Glossary to be applied. The glossary must be + // within the same region (have the same location-id) as the model, otherwise + // an INVALID_ARGUMENT (400) error is returned. + TranslateTextGlossaryConfig glossary_config = 7 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. 
+ // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 10 [(google.api.field_behavior) = OPTIONAL]; +} + +message TranslateTextResponse { + // Text translation responses with no glossary applied. + // This field has the same length as + // [`contents`][google.cloud.translation.v3.TranslateTextRequest.contents]. + repeated Translation translations = 1; + + // Text translation responses if a glossary is provided in the request. + // This can be the same as + // [`translations`][google.cloud.translation.v3.TranslateTextResponse.translations] + // if no terms apply. This field has the same length as + // [`contents`][google.cloud.translation.v3.TranslateTextRequest.contents]. + repeated Translation glossary_translations = 3; +} + +// A single translation response. +message Translation { + // Text translated into the target language. + string translated_text = 1; + + // Only present when `model` is present in the request. + // `model` here is normalized to have project number. + // + // For example: + // If the `model` requested in TranslationTextRequest is + // `projects/{project-id}/locations/{location-id}/models/general/nmt` then + // `model` here would be normalized to + // `projects/{project-number}/locations/{location-id}/models/general/nmt`. + string model = 2; + + // The BCP-47 language code of source text in the initial request, detected + // automatically, if no source language was passed within the initial + // request. If the source language was passed, auto-detection of the language + // does not occur and this field is empty. + string detected_language_code = 4; + + // The `glossary_config` used for this translation. 
+ TranslateTextGlossaryConfig glossary_config = 3; +} + +// The request message for language detection. +message DetectLanguageRequest { + // Required. Project or location to make a call. Must refer to a caller's + // project. + // + // Format: `projects/{project-number-or-id}/locations/{location-id}` or + // `projects/{project-number-or-id}`. + // + // For global calls, use `projects/{project-number-or-id}/locations/global` or + // `projects/{project-number-or-id}`. + // + // Only models within the same region (has same location-id) can be used. + // Otherwise an INVALID_ARGUMENT (400) error is returned. + string parent = 5 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Optional. The language detection model to be used. + // + // Format: + // `projects/{project-number-or-id}/locations/{location-id}/models/language-detection/{model-id}` + // + // Only one language detection model is currently supported: + // `projects/{project-number-or-id}/locations/{location-id}/models/language-detection/default`. + // + // If not specified, the default model is used. + string model = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The source of the document from which to detect the language. + oneof source { + // The content of the input stored as a string. + string content = 1; + } + + // Optional. The format of the source text, for example, "text/html", + // "text/plain". If left blank, the MIME type defaults to "text/html". + string mime_type = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. + // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. 
+ // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 6 [(google.api.field_behavior) = OPTIONAL]; +} + +// The response message for language detection. +message DetectedLanguage { + // The BCP-47 language code of source content in the request, detected + // automatically. + string language_code = 1; + + // The confidence of the detection result for this language. + float confidence = 2; +} + +// The response message for language detection. +message DetectLanguageResponse { + // A list of detected languages sorted by detection confidence in descending + // order. The most probable language first. + repeated DetectedLanguage languages = 1; +} + +// The request message for discovering supported languages. +message GetSupportedLanguagesRequest { + // Required. Project or location to make a call. Must refer to a caller's + // project. + // + // Format: `projects/{project-number-or-id}` or + // `projects/{project-number-or-id}/locations/{location-id}`. + // + // For global calls, use `projects/{project-number-or-id}/locations/global` or + // `projects/{project-number-or-id}`. + // + // Non-global location is required for AutoML models. + // + // Only models within the same region (have same location-id) can be used, + // otherwise an INVALID_ARGUMENT (400) error is returned. + string parent = 3 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Optional. The language to use to return localized, human readable names + // of supported languages. If missing, then display names are not returned + // in a response. + string display_language_code = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Get supported languages of this model. 
+ // + // The format depends on model type: + // + // - AutoML Translation models: + // `projects/{project-number-or-id}/locations/{location-id}/models/{model-id}` + // + // - General (built-in) models: + // `projects/{project-number-or-id}/locations/{location-id}/models/general/nmt`, + // `projects/{project-number-or-id}/locations/{location-id}/models/general/base` + // + // + // Returns languages supported by the specified model. + // If missing, we get supported languages of Google general base (PBMT) model. + string model = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// The response message for discovering supported languages. +message SupportedLanguages { + // A list of supported language responses. This list contains an entry + // for each language the Translation API supports. + repeated SupportedLanguage languages = 1; +} + +// A single supported language response corresponds to information related +// to one supported language. +message SupportedLanguage { + // Supported language code, generally consisting of its ISO 639-1 + // identifier, for example, 'en', 'ja'. In certain cases, BCP-47 codes + // including language and region identifiers are returned (for example, + // 'zh-TW' and 'zh-CN') + string language_code = 1; + + // Human readable name of the language localized in the display language + // specified in the request. + string display_name = 2; + + // Can be used as source language. + bool support_source = 3; + + // Can be used as target language. + bool support_target = 4; +} + +// The Google Cloud Storage location for the input content. +message GcsSource { + // Required. Source data URI. For example, `gs://my_bucket/my_object`. + string input_uri = 1; +} + +// Input configuration for BatchTranslateText request. +message InputConfig { + // Optional. Can be "text/plain" or "text/html". + // For `.tsv`, "text/html" is used if mime_type is missing. + // For `.html`, this field must be "text/html" or empty. 
+ // For `.txt`, this field must be "text/plain" or empty. + string mime_type = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Specify the input. + oneof source { + // Required. Google Cloud Storage location for the source input. + // This can be a single file (for example, + // `gs://translation-test/input.tsv`) or a wildcard (for example, + // `gs://translation-test/*`). If a file extension is `.tsv`, it can + // contain either one or two columns. The first column (optional) is the id + // of the text request. If the first column is missing, we use the row + // number (0-based) from the input file as the ID in the output file. The + // second column is the actual text to be + // translated. We recommend each row be <= 10K Unicode codepoints, + // otherwise an error might be returned. + // Note that the input tsv must be RFC 4180 compliant. + // + // You could use https://github.com/Clever/csvlint to check potential + // formatting errors in your tsv file. + // csvlint --delimiter='\t' your_input_file.tsv + // + // The other supported file extensions are `.txt` or `.html`, which is + // treated as a single large chunk of text. + GcsSource gcs_source = 2; + } +} + +// The Google Cloud Storage location for the output content. +message GcsDestination { + // Required. There must be no files under 'output_uri_prefix'. + // 'output_uri_prefix' must end with "/" and start with "gs://", otherwise an + // INVALID_ARGUMENT (400) error is returned. + string output_uri_prefix = 1; +} + +// Output configuration for BatchTranslateText request. +message OutputConfig { + // Required. The destination of output. + oneof destination { + // Google Cloud Storage destination for output content. + // For every single input file (for example, gs://a/b/c.[extension]), we + // generate at most 2 * n output files. (n is the # of target_language_codes + // in the BatchTranslateTextRequest). 
+ // + // Output files (tsv) generated are compliant with RFC 4180 except that + // record delimiters are '\n' instead of '\r\n'. We don't provide any way to + // change record delimiters. + // + // While the input files are being processed, we write/update an index file + // 'index.csv' under 'output_uri_prefix' (for example, + // gs://translation-test/index.csv) The index file is generated/updated as + // new files are being translated. The format is: + // + // input_file,target_language_code,translations_file,errors_file, + // glossary_translations_file,glossary_errors_file + // + // input_file is one file we matched using gcs_source.input_uri. + // target_language_code is provided in the request. + // translations_file contains the translations. (details provided below) + // errors_file contains the errors during processing of the file. (details + // below). Both translations_file and errors_file could be empty + // strings if we have no content to output. + // glossary_translations_file and glossary_errors_file are always empty + // strings if the input_file is tsv. They could also be empty if we have no + // content to output. + // + // Once a row is present in index.csv, the input/output matching never + // changes. Callers should also expect all the content in input_file are + // processed and ready to be consumed (that is, no partial output file is + // written). + // + // The format of translations_file (for target language code 'trg') is: + // gs://translation_test/a_b_c_'trg'_translations.[extension] + // + // If the input file extension is tsv, the output has the following + // columns: + // Column 1: ID of the request provided in the input, if it's not + // provided in the input, then the input row number is used (0-based). + // Column 2: source sentence. + // Column 3: translation without applying a glossary. Empty string if there + // is an error. 
+ // Column 4 (only present if a glossary is provided in the request): + // translation after applying the glossary. Empty string if there is an + // error applying the glossary. Could be same string as column 3 if there is + // no glossary applied. + // + // If input file extension is a txt or html, the translation is directly + // written to the output file. If glossary is requested, a separate + // glossary_translations_file has format of + // gs://translation_test/a_b_c_'trg'_glossary_translations.[extension] + // + // The format of errors file (for target language code 'trg') is: + // gs://translation_test/a_b_c_'trg'_errors.[extension] + // + // If the input file extension is tsv, errors_file contains the following: + // Column 1: ID of the request provided in the input, if it's not + // provided in the input, then the input row number is used (0-based). + // Column 2: source sentence. + // Column 3: Error detail for the translation. Could be empty. + // Column 4 (only present if a glossary is provided in the request): + // Error when applying the glossary. + // + // If the input file extension is txt or html, glossary_error_file will be + // generated that contains error details. glossary_error_file has format of + // gs://translation_test/a_b_c_'trg'_glossary_errors.[extension] + GcsDestination gcs_destination = 1; + } +} + +// The batch translation request. +message BatchTranslateTextRequest { + // Required. Location to make a call. Must refer to a caller's project. + // + // Format: `projects/{project-number-or-id}/locations/{location-id}`. + // + // The `global` location is not supported for batch translation. + // + // Only AutoML Translation models or glossaries within the same region (have + // the same location-id) can be used, otherwise an INVALID_ARGUMENT (400) + // error is returned. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. Source language code. + string source_language_code = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. Specify up to 10 language codes here. + repeated string target_language_codes = 3 + [(google.api.field_behavior) = REQUIRED]; + + // Optional. The models to use for translation. Map's key is target language + // code. Map's value is model name. Value can be a built-in general model, + // or an AutoML Translation model. + // + // The value format depends on model type: + // + // - AutoML Translation models: + // `projects/{project-number-or-id}/locations/{location-id}/models/{model-id}` + // + // - General (built-in) models: + // `projects/{project-number-or-id}/locations/{location-id}/models/general/nmt`, + // `projects/{project-number-or-id}/locations/{location-id}/models/general/base` + // + // + // If the map is empty or a specific model is + // not requested for a language pair, then default google model (nmt) is used. + map models = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Input configurations. + // The total number of files matched should be <= 1000. + // The total content size should be <= 100M Unicode codepoints. + // The files must use UTF-8 encoding. + repeated InputConfig input_configs = 5 + [(google.api.field_behavior) = REQUIRED]; + + // Required. Output configuration. + // If 2 input configs match to the same file (that is, same input path), + // we don't generate output for duplicate inputs. + OutputConfig output_config = 6 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Glossaries to be applied for translation. + // It's keyed by target language code. + map glossaries = 7 + [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. 
+ // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 9 [(google.api.field_behavior) = OPTIONAL]; +} + +// State metadata for the batch translation operation. +message BatchTranslateMetadata { + // State of the job. + enum State { + // Invalid. + STATE_UNSPECIFIED = 0; + + // Request is being processed. + RUNNING = 1; + + // The batch is processed, and at least one item was successfully + // processed. + SUCCEEDED = 2; + + // The batch is done and no item was successfully processed. + FAILED = 3; + + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + CANCELLING = 4; + + // The batch is done after the user has called the + // longrunning.Operations.CancelOperation. Any records processed before the + // cancel command are output as specified in the request. + CANCELLED = 5; + } + + // The state of the operation. + State state = 1; + + // Number of successfully translated characters so far (Unicode codepoints). + int64 translated_characters = 2; + + // Number of characters that have failed to process so far (Unicode + // codepoints). + int64 failed_characters = 3; + + // Total number of characters (Unicode codepoints). + // This is the total number of codepoints from input files times the number of + // target languages and appears here shortly after the call is submitted. + int64 total_characters = 4; + + // Time when the operation was submitted. 
+ google.protobuf.Timestamp submit_time = 5; +} + +// Stored in the +// [google.longrunning.Operation.response][google.longrunning.Operation.response] +// field returned by BatchTranslateText if at least one sentence is translated +// successfully. +message BatchTranslateResponse { + // Total number of characters (Unicode codepoints). + int64 total_characters = 1; + + // Number of successfully translated characters (Unicode codepoints). + int64 translated_characters = 2; + + // Number of characters that have failed to process (Unicode codepoints). + int64 failed_characters = 3; + + // Time when the operation was submitted. + google.protobuf.Timestamp submit_time = 4; + + // The time when the operation is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is + // set to true. + google.protobuf.Timestamp end_time = 5; +} + +// Input configuration for glossaries. +message GlossaryInputConfig { + // Required. Specify the input. + oneof source { + // Required. Google Cloud Storage location of glossary data. + // File format is determined based on the filename extension. API returns + // [google.rpc.Code.INVALID_ARGUMENT] for unsupported URI-s and file + // formats. Wildcards are not allowed. This must be a single file in one of + // the following formats: + // + // For unidirectional glossaries: + // + // - TSV/CSV (`.tsv`/`.csv`): 2 column file, tab- or comma-separated. + // The first column is source text. The second column is target text. + // The file must not contain headers. That is, the first row is data, not + // column names. + // + // - TMX (`.tmx`): TMX file with parallel data defining source/target term + // pairs. + // + // For equivalent term sets glossaries: + // + // - CSV (`.csv`): Multi-column CSV file defining equivalent glossary terms + // in multiple languages. 
The format is defined for Google Translation + // Toolkit and documented in [Use a + // glossary](https://support.google.com/translatortoolkit/answer/6306379?hl=en). + GcsSource gcs_source = 1; + } +} + +// Represents a glossary built from user provided data. +message Glossary { + option (google.api.resource) = { + type: "translate.googleapis.com/Glossary" + pattern: "projects/{project}/locations/{location}/glossaries/{glossary}" + }; + + // Used with unidirectional glossaries. + message LanguageCodePair { + // Required. The BCP-47 language code of the input text, for example, + // "en-US". Expected to be an exact match for GlossaryTerm.language_code. + string source_language_code = 1; + + // Required. The BCP-47 language code for translation output, for example, + // "zh-CN". Expected to be an exact match for GlossaryTerm.language_code. + string target_language_code = 2; + } + + // Used with equivalent term set glossaries. + message LanguageCodesSet { + // The BCP-47 language code(s) for terms defined in the glossary. + // All entries are unique. The list contains at least two entries. + // Expected to be an exact match for GlossaryTerm.language_code. + repeated string language_codes = 1; + } + + // Required. The resource name of the glossary. Glossary names have the form + // `projects/{project-number-or-id}/locations/{location-id}/glossaries/{glossary-id}`. + string name = 1; + + // Languages supported by the glossary. + oneof languages { + // Used with unidirectional glossaries. + LanguageCodePair language_pair = 3; + + // Used with equivalent term set glossaries. + LanguageCodesSet language_codes_set = 4; + } + + // Required. Provides examples to build the glossary from. + // Total glossary must not exceed 10M Unicode codepoints. + GlossaryInputConfig input_config = 5; + + // Output only. The number of entries defined in the glossary. + int32 entry_count = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. When CreateGlossary was called. 
+ google.protobuf.Timestamp submit_time = 7 + [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. When the glossary creation was finished. + google.protobuf.Timestamp end_time = 8 + [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Request message for CreateGlossary. +message CreateGlossaryRequest { + // Required. The project name. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Required. The glossary to create. + Glossary glossary = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Request message for GetGlossary. +message GetGlossaryRequest { + // Required. The name of the glossary to retrieve. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "translate.googleapis.com/Glossary" + } + ]; +} + +// Request message for DeleteGlossary. +message DeleteGlossaryRequest { + // Required. The name of the glossary to delete. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "translate.googleapis.com/Glossary" + } + ]; +} + +// Request message for ListGlossaries. +message ListGlossariesRequest { + // Required. The name of the project from which to list all of the glossaries. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; + + // Optional. Requested page size. The server may return fewer glossaries than + // requested. If unspecified, the server picks an appropriate default. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A token identifying a page of results the server should return. + // Typically, this is the value of [ListGlossariesResponse.next_page_token] + // returned from the previous call to `ListGlossaries` method. + // The first page is returned if `page_token`is empty or missing. 
+ string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Filter specifying constraints of a list operation. + // Filtering is not supported yet, and the parameter currently has no effect. + // If missing, no filtering is performed. + string filter = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Response message for ListGlossaries. +message ListGlossariesResponse { + // The list of glossaries for a project. + repeated Glossary glossaries = 1; + + // A token to retrieve a page of results. Pass this value in the + // [ListGlossariesRequest.page_token] field in the subsequent call to + // `ListGlossaries` method to retrieve the next page of results. + string next_page_token = 2; +} + +// Stored in the +// [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] +// field returned by CreateGlossary. +message CreateGlossaryMetadata { + // Enumerates the possible states that the creation request can be in. + enum State { + // Invalid. + STATE_UNSPECIFIED = 0; + + // Request is being processed. + RUNNING = 1; + + // The glossary was successfully created. + SUCCEEDED = 2; + + // Failed to create the glossary. + FAILED = 3; + + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + CANCELLING = 4; + + // The glossary creation request was successfully canceled. + CANCELLED = 5; + } + + // The name of the glossary that is being created. + string name = 1; + + // The current state of the glossary creation operation. + State state = 2; + + // The time when the operation was submitted to the server. + google.protobuf.Timestamp submit_time = 3; +} + +// Stored in the +// [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] +// field returned by DeleteGlossary. +message DeleteGlossaryMetadata { + // Enumerates the possible states that the creation request can be in. + enum State { + // Invalid. 
+ STATE_UNSPECIFIED = 0; + + // Request is being processed. + RUNNING = 1; + + // The glossary was successfully deleted. + SUCCEEDED = 2; + + // Failed to delete the glossary. + FAILED = 3; + + // Request is in the process of being canceled after caller invoked + // longrunning.Operations.CancelOperation on the request id. + CANCELLING = 4; + + // The glossary deletion request was successfully canceled. + CANCELLED = 5; + } + + // The name of the glossary that is being deleted. + string name = 1; + + // The current state of the glossary deletion operation. + State state = 2; + + // The time when the operation was submitted to the server. + google.protobuf.Timestamp submit_time = 3; +} + +// Stored in the +// [google.longrunning.Operation.response][google.longrunning.Operation.response] +// field returned by DeleteGlossary. +message DeleteGlossaryResponse { + // The name of the deleted glossary. + string name = 1; + + // The time when the operation was submitted to the server. + google.protobuf.Timestamp submit_time = 2; + + // The time when the glossary deletion is finished and + // [google.longrunning.Operation.done][google.longrunning.Operation.done] is + // set to true. + google.protobuf.Timestamp end_time = 3; +} diff --git a/translate/google/cloud/translate_v3/proto/translation_service_pb2.py b/translate/google/cloud/translate_v3/proto/translation_service_pb2.py new file mode 100644 index 000000000000..7bc7e15d284e --- /dev/null +++ b/translate/google/cloud/translate_v3/proto/translation_service_pb2.py @@ -0,0 +1,3902 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/translation_v3/proto/translation_service.proto + +import sys + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name="google/cloud/translation_v3/proto/translation_service.proto", + package="google.cloud.translation.v3", + syntax="proto3", + serialized_options=_b( + "\n\035com.google.cloud.translate.v3B\027TranslationServiceProtoP\001ZBgoogle.golang.org/genproto/googleapis/cloud/translate/v3;translate\370\001\001\252\002\031Google.Cloud.Translate.V3\312\002\031Google\\Cloud\\Translate\\V3\352\002\034Google::Cloud::Translate::V3" + ), + serialized_pb=_b( + '\n;google/cloud/translation_v3/proto/translation_service.proto\x12\x1bgoogle.cloud.translation.v3\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"N\n\x1bTranslateTextGlossaryConfig\x12\x15\n\x08glossary\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bignore_case\x18\x02 
\x01(\x08\x42\x03\xe0\x41\x01"\xb5\x03\n\x14TranslateTextRequest\x12\x15\n\x08\x63ontents\x18\x01 \x03(\tB\x03\xe0\x41\x02\x12\x16\n\tmime_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12!\n\x14source_language_code\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12!\n\x14target_language_code\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x06parent\x18\x08 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x12\n\x05model\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12V\n\x0fglossary_config\x18\x07 \x01(\x0b\x32\x38.google.cloud.translation.v3.TranslateTextGlossaryConfigB\x03\xe0\x41\x01\x12R\n\x06labels\x18\n \x03(\x0b\x32=.google.cloud.translation.v3.TranslateTextRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xa0\x01\n\x15TranslateTextResponse\x12>\n\x0ctranslations\x18\x01 \x03(\x0b\x32(.google.cloud.translation.v3.Translation\x12G\n\x15glossary_translations\x18\x03 \x03(\x0b\x32(.google.cloud.translation.v3.Translation"\xa8\x01\n\x0bTranslation\x12\x17\n\x0ftranslated_text\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t\x12\x1e\n\x16\x64\x65tected_language_code\x18\x04 \x01(\t\x12Q\n\x0fglossary_config\x18\x03 \x01(\x0b\x32\x38.google.cloud.translation.v3.TranslateTextGlossaryConfig"\x9f\x02\n\x15\x44\x65tectLanguageRequest\x12\x39\n\x06parent\x18\x05 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x12\n\x05model\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x07\x63ontent\x18\x01 \x01(\tH\x00\x12\x16\n\tmime_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12S\n\x06labels\x18\x06 \x03(\x0b\x32>.google.cloud.translation.v3.DetectLanguageRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06source"=\n\x10\x44\x65tectedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"Z\n\x16\x44\x65tectLanguageResponse\x12@\n\tlanguages\x18\x01 
\x03(\x0b\x32-.google.cloud.translation.v3.DetectedLanguage"\x91\x01\n\x1cGetSupportedLanguagesRequest\x12\x39\n\x06parent\x18\x03 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12"\n\x15\x64isplay_language_code\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x12\n\x05model\x18\x02 \x01(\tB\x03\xe0\x41\x01"W\n\x12SupportedLanguages\x12\x41\n\tlanguages\x18\x01 \x03(\x0b\x32..google.cloud.translation.v3.SupportedLanguage"p\n\x11SupportedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x16\n\x0esupport_source\x18\x03 \x01(\x08\x12\x16\n\x0esupport_target\x18\x04 \x01(\x08"\x1e\n\tGcsSource\x12\x11\n\tinput_uri\x18\x01 \x01(\t"m\n\x0bInputConfig\x12\x16\n\tmime_type\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12<\n\ngcs_source\x18\x02 \x01(\x0b\x32&.google.cloud.translation.v3.GcsSourceH\x00\x42\x08\n\x06source"+\n\x0eGcsDestination\x12\x19\n\x11output_uri_prefix\x18\x01 \x01(\t"e\n\x0cOutputConfig\x12\x46\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32+.google.cloud.translation.v3.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"\x88\x06\n\x19\x42\x61tchTranslateTextRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12!\n\x14source_language_code\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12"\n\x15target_language_codes\x18\x03 \x03(\tB\x03\xe0\x41\x02\x12W\n\x06models\x18\x04 \x03(\x0b\x32\x42.google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntryB\x03\xe0\x41\x01\x12\x44\n\rinput_configs\x18\x05 \x03(\x0b\x32(.google.cloud.translation.v3.InputConfigB\x03\xe0\x41\x02\x12\x45\n\routput_config\x18\x06 \x01(\x0b\x32).google.cloud.translation.v3.OutputConfigB\x03\xe0\x41\x02\x12_\n\nglossaries\x18\x07 \x03(\x0b\x32\x46.google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntryB\x03\xe0\x41\x01\x12W\n\x06labels\x18\t \x03(\x0b\x32\x42.google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bModelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1ak\n\x0fGlossariesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12G\n\x05value\x18\x02 \x01(\x0b\x32\x38.google.cloud.translation.v3.TranslateTextGlossaryConfig:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xce\x02\n\x16\x42\x61tchTranslateMetadata\x12H\n\x05state\x18\x01 \x01(\x0e\x32\x39.google.cloud.translation.v3.BatchTranslateMetadata.State\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12\x18\n\x10total_characters\x18\x04 \x01(\x03\x12/\n\x0bsubmit_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\xcb\x01\n\x16\x42\x61tchTranslateResponse\x12\x18\n\x10total_characters\x18\x01 \x01(\x03\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12/\n\x0bsubmit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"]\n\x13GlossaryInputConfig\x12<\n\ngcs_source\x18\x01 \x01(\x0b\x32&.google.cloud.translation.v3.GcsSourceH\x00\x42\x08\n\x06source"\xfa\x04\n\x08Glossary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12O\n\rlanguage_pair\x18\x03 \x01(\x0b\x32\x36.google.cloud.translation.v3.Glossary.LanguageCodePairH\x00\x12T\n\x12language_codes_set\x18\x04 \x01(\x0b\x32\x36.google.cloud.translation.v3.Glossary.LanguageCodesSetH\x00\x12\x46\n\x0cinput_config\x18\x05 \x01(\x0b\x32\x30.google.cloud.translation.v3.GlossaryInputConfig\x12\x18\n\x0b\x65ntry_count\x18\x06 \x01(\x05\x42\x03\xe0\x41\x03\x12\x34\n\x0bsubmit_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x08 
\x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x1aN\n\x10LanguageCodePair\x12\x1c\n\x14source_language_code\x18\x01 \x01(\t\x12\x1c\n\x14target_language_code\x18\x02 \x01(\t\x1a*\n\x10LanguageCodesSet\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t:e\xea\x41\x62\n!translate.googleapis.com/Glossary\x12=projects/{project}/locations/{location}/glossaries/{glossary}B\x0b\n\tlanguages"\x90\x01\n\x15\x43reateGlossaryRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12<\n\x08glossary\x18\x02 \x01(\x0b\x32%.google.cloud.translation.v3.GlossaryB\x03\xe0\x41\x02"M\n\x12GetGlossaryRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!translate.googleapis.com/Glossary"P\n\x15\x44\x65leteGlossaryRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!translate.googleapis.com/Glossary"\x98\x01\n\x15ListGlossariesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01"l\n\x16ListGlossariesResponse\x12\x39\n\nglossaries\x18\x01 \x03(\x0b\x32%.google.cloud.translation.v3.Glossary\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x88\x02\n\x16\x43reateGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x39.google.cloud.translation.v3.CreateGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x88\x02\n\x16\x44\x65leteGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12H\n\x05state\x18\x02 \x01(\x0e\x32\x39.google.cloud.translation.v3.DeleteGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x85\x01\n\x16\x44\x65leteGlossaryResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp2\x8e\x10\n\x12TranslationService\x12\xd4\x02\n\rTranslateText\x12\x31.google.cloud.translation.v3.TranslateTextRequest\x1a\x32.google.cloud.translation.v3.TranslateTextResponse"\xdb\x01\x82\xd3\xe4\x93\x02\x62"1/v3/{parent=projects/*/locations/*}:translateText:\x01*Z*"%/v3/{parent=projects/*}:translateText:\x01*\xda\x41$parent,target_language_code,contents\xda\x41Iparent,model,mime_type,source_language_code,target_language_code,contents\x12\x87\x02\n\x0e\x44\x65tectLanguage\x12\x32.google.cloud.translation.v3.DetectLanguageRequest\x1a\x33.google.cloud.translation.v3.DetectLanguageResponse"\x8b\x01\x82\xd3\xe4\x93\x02\x64"2/v3/{parent=projects/*/locations/*}:detectLanguage:\x01*Z+"&/v3/{parent=projects/*}:detectLanguage:\x01*\xda\x41\x1eparent,model,mime_type,content\x12\x97\x02\n\x15GetSupportedLanguages\x12\x39.google.cloud.translation.v3.GetSupportedLanguagesRequest\x1a/.google.cloud.translation.v3.SupportedLanguages"\x91\x01\x82\xd3\xe4\x93\x02\x66\x12\x36/v3/{parent=projects/*/locations/*}/supportedLanguagesZ,\x12*/v3/{parent=projects/*}/supportedLanguages\xda\x41"parent,model,display_language_code\x12\xe1\x01\n\x12\x42\x61tchTranslateText\x12\x36.google.cloud.translation.v3.BatchTranslateTextRequest\x1a\x1d.google.longrunning.Operation"t\x82\xd3\xe4\x93\x02;"6/v3/{parent=projects/*/locations/*}:batchTranslateText:\x01*\xca\x41\x30\n\x16\x42\x61tchTranslateResponse\x12\x16\x42\x61tchTranslateMetadata\x12\xdc\x01\n\x0e\x43reateGlossary\x12\x32.google.cloud.translation.v3.CreateGlossaryRequest\x1a\x1d.googl
e.longrunning.Operation"w\x82\xd3\xe4\x93\x02:"./v3/{parent=projects/*/locations/*}/glossaries:\x08glossary\xda\x41\x0fparent,glossary\xca\x41"\n\x08Glossary\x12\x16\x43reateGlossaryMetadata\x12\xba\x01\n\x0eListGlossaries\x12\x32.google.cloud.translation.v3.ListGlossariesRequest\x1a\x33.google.cloud.translation.v3.ListGlossariesResponse"?\x82\xd3\xe4\x93\x02\x30\x12./v3/{parent=projects/*/locations/*}/glossaries\xda\x41\x06parent\x12\xa4\x01\n\x0bGetGlossary\x12/.google.cloud.translation.v3.GetGlossaryRequest\x1a%.google.cloud.translation.v3.Glossary"=\x82\xd3\xe4\x93\x02\x30\x12./v3/{name=projects/*/locations/*/glossaries/*}\xda\x41\x04name\x12\xd5\x01\n\x0e\x44\x65leteGlossary\x12\x32.google.cloud.translation.v3.DeleteGlossaryRequest\x1a\x1d.google.longrunning.Operation"p\x82\xd3\xe4\x93\x02\x30*./v3/{name=projects/*/locations/*/glossaries/*}\xda\x41\x04name\xca\x41\x30\n\x16\x44\x65leteGlossaryResponse\x12\x16\x44\x65leteGlossaryMetadata\x1a~\xca\x41\x18translate.googleapis.com\xd2\x41`https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-translationB\xd8\x01\n\x1d\x63om.google.cloud.translate.v3B\x17TranslationServiceProtoP\x01ZBgoogle.golang.org/genproto/googleapis/cloud/translate/v3;translate\xf8\x01\x01\xaa\x02\x19Google.Cloud.Translate.V3\xca\x02\x19Google\\Cloud\\Translate\\V3\xea\x02\x1cGoogle::Cloud::Translate::V3b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) + + +_BATCHTRANSLATEMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + 
index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="RUNNING", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLING", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3231, + serialized_end=3332, +) +_sym_db.RegisterEnumDescriptor(_BATCHTRANSLATEMETADATA_STATE) + +_CREATEGLOSSARYMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="RUNNING", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLING", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3231, + serialized_end=3332, +) +_sym_db.RegisterEnumDescriptor(_CREATEGLOSSARYMETADATA_STATE) + +_DELETEGLOSSARYMETADATA_STATE = _descriptor.EnumDescriptor( + name="State", + 
full_name="google.cloud.translation.v3.DeleteGlossaryMetadata.State", + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name="STATE_UNSPECIFIED", + index=0, + number=0, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="RUNNING", index=1, number=1, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="SUCCEEDED", index=2, number=2, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="FAILED", index=3, number=3, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLING", index=4, number=4, serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="CANCELLED", index=5, number=5, serialized_options=None, type=None + ), + ], + containing_type=None, + serialized_options=None, + serialized_start=3231, + serialized_end=3332, +) +_sym_db.RegisterEnumDescriptor(_DELETEGLOSSARYMETADATA_STATE) + + +_TRANSLATETEXTGLOSSARYCONFIG = _descriptor.Descriptor( + name="TranslateTextGlossaryConfig", + full_name="google.cloud.translation.v3.TranslateTextGlossaryConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="glossary", + full_name="google.cloud.translation.v3.TranslateTextGlossaryConfig.glossary", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ignore_case", + full_name="google.cloud.translation.v3.TranslateTextGlossaryConfig.ignore_case", + index=1, + number=2, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=277, + serialized_end=355, +) + + +_TRANSLATETEXTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3.TranslateTextRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.TranslateTextRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.TranslateTextRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=750, + serialized_end=795, +) + +_TRANSLATETEXTREQUEST = _descriptor.Descriptor( + name="TranslateTextRequest", + full_name="google.cloud.translation.v3.TranslateTextRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="contents", + full_name="google.cloud.translation.v3.TranslateTextRequest.contents", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + 
default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mime_type", + full_name="google.cloud.translation.v3.TranslateTextRequest.mime_type", + index=1, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_language_code", + full_name="google.cloud.translation.v3.TranslateTextRequest.source_language_code", + index=2, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_language_code", + full_name="google.cloud.translation.v3.TranslateTextRequest.target_language_code", + index=3, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.TranslateTextRequest.parent", + index=4, + number=8, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + 
name="model", + full_name="google.cloud.translation.v3.TranslateTextRequest.model", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="glossary_config", + full_name="google.cloud.translation.v3.TranslateTextRequest.glossary_config", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3.TranslateTextRequest.labels", + index=7, + number=10, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_TRANSLATETEXTREQUEST_LABELSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=358, + serialized_end=795, +) + + +_TRANSLATETEXTRESPONSE = _descriptor.Descriptor( + name="TranslateTextResponse", + full_name="google.cloud.translation.v3.TranslateTextResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="translations", + full_name="google.cloud.translation.v3.TranslateTextResponse.translations", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="glossary_translations", + full_name="google.cloud.translation.v3.TranslateTextResponse.glossary_translations", + index=1, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=798, + serialized_end=958, +) + + +_TRANSLATION = _descriptor.Descriptor( + name="Translation", + full_name="google.cloud.translation.v3.Translation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="translated_text", + full_name="google.cloud.translation.v3.Translation.translated_text", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model", + full_name="google.cloud.translation.v3.Translation.model", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="detected_language_code", + full_name="google.cloud.translation.v3.Translation.detected_language_code", + index=2, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="glossary_config", + full_name="google.cloud.translation.v3.Translation.glossary_config", + index=3, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=961, + serialized_end=1129, +) + + +_DETECTLANGUAGEREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3.DetectLanguageRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.DetectLanguageRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.DetectLanguageRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=750, + serialized_end=795, +) + +_DETECTLANGUAGEREQUEST = _descriptor.Descriptor( + name="DetectLanguageRequest", + full_name="google.cloud.translation.v3.DetectLanguageRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.DetectLanguageRequest.parent", + index=0, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model", + full_name="google.cloud.translation.v3.DetectLanguageRequest.model", + index=1, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="content", + full_name="google.cloud.translation.v3.DetectLanguageRequest.content", + index=2, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="mime_type", + full_name="google.cloud.translation.v3.DetectLanguageRequest.mime_type", + index=3, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3.DetectLanguageRequest.labels", + index=4, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_DETECTLANGUAGEREQUEST_LABELSENTRY], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.cloud.translation.v3.DetectLanguageRequest.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1132, + serialized_end=1419, +) + + +_DETECTEDLANGUAGE = _descriptor.Descriptor( + name="DetectedLanguage", + full_name="google.cloud.translation.v3.DetectedLanguage", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="language_code", + full_name="google.cloud.translation.v3.DetectedLanguage.language_code", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="confidence", + full_name="google.cloud.translation.v3.DetectedLanguage.confidence", + index=1, + number=2, + type=2, + cpp_type=6, + label=1, + has_default_value=False, + default_value=float(0), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=1421, + serialized_end=1482, +) + + +_DETECTLANGUAGERESPONSE = _descriptor.Descriptor( + name="DetectLanguageResponse", + full_name="google.cloud.translation.v3.DetectLanguageResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="languages", + full_name="google.cloud.translation.v3.DetectLanguageResponse.languages", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1484, + serialized_end=1574, +) + + +_GETSUPPORTEDLANGUAGESREQUEST = _descriptor.Descriptor( + name="GetSupportedLanguagesRequest", + full_name="google.cloud.translation.v3.GetSupportedLanguagesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.GetSupportedLanguagesRequest.parent", + index=0, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="display_language_code", + full_name="google.cloud.translation.v3.GetSupportedLanguagesRequest.display_language_code", + index=1, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="model", + full_name="google.cloud.translation.v3.GetSupportedLanguagesRequest.model", + index=2, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1577, + serialized_end=1722, +) + + +_SUPPORTEDLANGUAGES = _descriptor.Descriptor( + name="SupportedLanguages", + full_name="google.cloud.translation.v3.SupportedLanguages", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="languages", + full_name="google.cloud.translation.v3.SupportedLanguages.languages", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1724, + serialized_end=1811, +) + + +_SUPPORTEDLANGUAGE = _descriptor.Descriptor( + name="SupportedLanguage", + full_name="google.cloud.translation.v3.SupportedLanguage", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="language_code", + full_name="google.cloud.translation.v3.SupportedLanguage.language_code", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="display_name", + full_name="google.cloud.translation.v3.SupportedLanguage.display_name", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="support_source", + full_name="google.cloud.translation.v3.SupportedLanguage.support_source", + index=2, + number=3, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="support_target", + full_name="google.cloud.translation.v3.SupportedLanguage.support_target", + index=3, + number=4, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1813, + serialized_end=1925, +) + + +_GCSSOURCE = _descriptor.Descriptor( + name="GcsSource", + full_name="google.cloud.translation.v3.GcsSource", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="input_uri", + full_name="google.cloud.translation.v3.GcsSource.input_uri", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1927, + serialized_end=1957, +) + + +_INPUTCONFIG = _descriptor.Descriptor( + name="InputConfig", + full_name="google.cloud.translation.v3.InputConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="mime_type", + full_name="google.cloud.translation.v3.InputConfig.mime_type", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="gcs_source", + full_name="google.cloud.translation.v3.InputConfig.gcs_source", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.cloud.translation.v3.InputConfig.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=1959, + serialized_end=2068, +) + + +_GCSDESTINATION = _descriptor.Descriptor( + name="GcsDestination", + full_name="google.cloud.translation.v3.GcsDestination", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + 
name="output_uri_prefix", + full_name="google.cloud.translation.v3.GcsDestination.output_uri_prefix", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2070, + serialized_end=2113, +) + + +_OUTPUTCONFIG = _descriptor.Descriptor( + name="OutputConfig", + full_name="google.cloud.translation.v3.OutputConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="gcs_destination", + full_name="google.cloud.translation.v3.OutputConfig.gcs_destination", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="destination", + full_name="google.cloud.translation.v3.OutputConfig.destination", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=2115, + serialized_end=2216, +) + + +_BATCHTRANSLATETEXTREQUEST_MODELSENTRY = _descriptor.Descriptor( + name="ModelsEntry", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntry.key", + index=0, + number=1, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2794, + serialized_end=2839, +) + +_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY = _descriptor.Descriptor( + name="GlossariesEntry", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntry.value", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + 
nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2841, + serialized_end=2948, +) + +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=750, + serialized_end=795, +) + +_BATCHTRANSLATETEXTREQUEST = _descriptor.Descriptor( + name="BatchTranslateTextRequest", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="source_language_code", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.source_language_code", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_language_codes", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.target_language_codes", + index=2, + number=3, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="models", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.models", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="input_configs", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.input_configs", + index=4, + number=5, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="output_config", + 
full_name="google.cloud.translation.v3.BatchTranslateTextRequest.output_config", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="glossaries", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.glossaries", + index=6, + number=7, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3.BatchTranslateTextRequest.labels", + index=7, + number=9, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[ + _BATCHTRANSLATETEXTREQUEST_MODELSENTRY, + _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY, + _BATCHTRANSLATETEXTREQUEST_LABELSENTRY, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2219, + serialized_end=2995, +) + + +_BATCHTRANSLATEMETADATA = _descriptor.Descriptor( + name="BatchTranslateMetadata", + full_name="google.cloud.translation.v3.BatchTranslateMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.state", + index=0, + number=1, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="translated_characters", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.translated_characters", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="failed_characters", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.failed_characters", + index=2, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="total_characters", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.total_characters", + index=3, + number=4, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.BatchTranslateMetadata.submit_time", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_BATCHTRANSLATEMETADATA_STATE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2998, + 
serialized_end=3332, +) + + +_BATCHTRANSLATERESPONSE = _descriptor.Descriptor( + name="BatchTranslateResponse", + full_name="google.cloud.translation.v3.BatchTranslateResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="total_characters", + full_name="google.cloud.translation.v3.BatchTranslateResponse.total_characters", + index=0, + number=1, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="translated_characters", + full_name="google.cloud.translation.v3.BatchTranslateResponse.translated_characters", + index=1, + number=2, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="failed_characters", + full_name="google.cloud.translation.v3.BatchTranslateResponse.failed_characters", + index=2, + number=3, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.BatchTranslateResponse.submit_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + 
full_name="google.cloud.translation.v3.BatchTranslateResponse.end_time", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3335, + serialized_end=3538, +) + + +_GLOSSARYINPUTCONFIG = _descriptor.Descriptor( + name="GlossaryInputConfig", + full_name="google.cloud.translation.v3.GlossaryInputConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="gcs_source", + full_name="google.cloud.translation.v3.GlossaryInputConfig.gcs_source", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="source", + full_name="google.cloud.translation.v3.GlossaryInputConfig.source", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=3540, + serialized_end=3633, +) + + +_GLOSSARY_LANGUAGECODEPAIR = _descriptor.Descriptor( + name="LanguageCodePair", + full_name="google.cloud.translation.v3.Glossary.LanguageCodePair", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="source_language_code", + full_name="google.cloud.translation.v3.Glossary.LanguageCodePair.source_language_code", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + 
has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="target_language_code", + full_name="google.cloud.translation.v3.Glossary.LanguageCodePair.target_language_code", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4032, + serialized_end=4110, +) + +_GLOSSARY_LANGUAGECODESSET = _descriptor.Descriptor( + name="LanguageCodesSet", + full_name="google.cloud.translation.v3.Glossary.LanguageCodesSet", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="language_codes", + full_name="google.cloud.translation.v3.Glossary.LanguageCodesSet.language_codes", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4112, + serialized_end=4154, +) + +_GLOSSARY = _descriptor.Descriptor( + name="Glossary", + full_name="google.cloud.translation.v3.Glossary", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + 
full_name="google.cloud.translation.v3.Glossary.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="language_pair", + full_name="google.cloud.translation.v3.Glossary.language_pair", + index=1, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="language_codes_set", + full_name="google.cloud.translation.v3.Glossary.language_codes_set", + index=2, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="input_config", + full_name="google.cloud.translation.v3.Glossary.input_config", + index=3, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="entry_count", + full_name="google.cloud.translation.v3.Glossary.entry_count", + index=4, + number=6, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + 
full_name="google.cloud.translation.v3.Glossary.submit_time", + index=5, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.cloud.translation.v3.Glossary.end_time", + index=6, + number=8, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\003"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_GLOSSARY_LANGUAGECODEPAIR, _GLOSSARY_LANGUAGECODESSET], + enum_types=[], + serialized_options=_b( + "\352Ab\n!translate.googleapis.com/Glossary\022=projects/{project}/locations/{location}/glossaries/{glossary}" + ), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="languages", + full_name="google.cloud.translation.v3.Glossary.languages", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=3636, + serialized_end=4270, +) + + +_CREATEGLOSSARYREQUEST = _descriptor.Descriptor( + name="CreateGlossaryRequest", + full_name="google.cloud.translation.v3.CreateGlossaryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.CreateGlossaryRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="glossary", + full_name="google.cloud.translation.v3.CreateGlossaryRequest.glossary", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\002"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4273, + serialized_end=4417, +) + + +_GETGLOSSARYREQUEST = _descriptor.Descriptor( + name="GetGlossaryRequest", + full_name="google.cloud.translation.v3.GetGlossaryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.GetGlossaryRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!translate.googleapis.com/Glossary" + ), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4419, + serialized_end=4496, +) + + +_DELETEGLOSSARYREQUEST = _descriptor.Descriptor( + name="DeleteGlossaryRequest", + full_name="google.cloud.translation.v3.DeleteGlossaryRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.DeleteGlossaryRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!translate.googleapis.com/Glossary" + ), + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4498, + serialized_end=4578, +) + + +_LISTGLOSSARIESREQUEST = _descriptor.Descriptor( + name="ListGlossariesRequest", + full_name="google.cloud.translation.v3.ListGlossariesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="parent", + full_name="google.cloud.translation.v3.ListGlossariesRequest.parent", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.cloud.translation.v3.ListGlossariesRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.cloud.translation.v3.ListGlossariesRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="filter", + 
full_name="google.cloud.translation.v3.ListGlossariesRequest.filter", + index=3, + number=4, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4581, + serialized_end=4733, +) + + +_LISTGLOSSARIESRESPONSE = _descriptor.Descriptor( + name="ListGlossariesResponse", + full_name="google.cloud.translation.v3.ListGlossariesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="glossaries", + full_name="google.cloud.translation.v3.ListGlossariesResponse.glossaries", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.cloud.translation.v3.ListGlossariesResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4735, + serialized_end=4843, +) + + +_CREATEGLOSSARYMETADATA = _descriptor.Descriptor( + name="CreateGlossaryMetadata", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata", + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata.state", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.CreateGlossaryMetadata.submit_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_CREATEGLOSSARYMETADATA_STATE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4846, + serialized_end=5110, +) + + +_DELETEGLOSSARYMETADATA = _descriptor.Descriptor( + name="DeleteGlossaryMetadata", + full_name="google.cloud.translation.v3.DeleteGlossaryMetadata", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.DeleteGlossaryMetadata.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + 
enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="state", + full_name="google.cloud.translation.v3.DeleteGlossaryMetadata.state", + index=1, + number=2, + type=14, + cpp_type=8, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.DeleteGlossaryMetadata.submit_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[_DELETEGLOSSARYMETADATA_STATE], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=5113, + serialized_end=5377, +) + + +_DELETEGLOSSARYRESPONSE = _descriptor.Descriptor( + name="DeleteGlossaryResponse", + full_name="google.cloud.translation.v3.DeleteGlossaryResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.translation.v3.DeleteGlossaryResponse.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="submit_time", + full_name="google.cloud.translation.v3.DeleteGlossaryResponse.submit_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="google.cloud.translation.v3.DeleteGlossaryResponse.end_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=5380, + serialized_end=5513, +) + +_TRANSLATETEXTREQUEST_LABELSENTRY.containing_type = _TRANSLATETEXTREQUEST +_TRANSLATETEXTREQUEST.fields_by_name[ + "glossary_config" +].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_TRANSLATETEXTREQUEST.fields_by_name[ + "labels" +].message_type = _TRANSLATETEXTREQUEST_LABELSENTRY +_TRANSLATETEXTRESPONSE.fields_by_name["translations"].message_type = _TRANSLATION +_TRANSLATETEXTRESPONSE.fields_by_name[ + "glossary_translations" +].message_type = _TRANSLATION +_TRANSLATION.fields_by_name[ + "glossary_config" +].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_DETECTLANGUAGEREQUEST_LABELSENTRY.containing_type = _DETECTLANGUAGEREQUEST +_DETECTLANGUAGEREQUEST.fields_by_name[ + "labels" +].message_type = _DETECTLANGUAGEREQUEST_LABELSENTRY +_DETECTLANGUAGEREQUEST.oneofs_by_name["source"].fields.append( + _DETECTLANGUAGEREQUEST.fields_by_name["content"] +) +_DETECTLANGUAGEREQUEST.fields_by_name[ + "content" +].containing_oneof = _DETECTLANGUAGEREQUEST.oneofs_by_name["source"] +_DETECTLANGUAGERESPONSE.fields_by_name["languages"].message_type = _DETECTEDLANGUAGE +_SUPPORTEDLANGUAGES.fields_by_name["languages"].message_type = _SUPPORTEDLANGUAGE 
+_INPUTCONFIG.fields_by_name["gcs_source"].message_type = _GCSSOURCE +_INPUTCONFIG.oneofs_by_name["source"].fields.append( + _INPUTCONFIG.fields_by_name["gcs_source"] +) +_INPUTCONFIG.fields_by_name[ + "gcs_source" +].containing_oneof = _INPUTCONFIG.oneofs_by_name["source"] +_OUTPUTCONFIG.fields_by_name["gcs_destination"].message_type = _GCSDESTINATION +_OUTPUTCONFIG.oneofs_by_name["destination"].fields.append( + _OUTPUTCONFIG.fields_by_name["gcs_destination"] +) +_OUTPUTCONFIG.fields_by_name[ + "gcs_destination" +].containing_oneof = _OUTPUTCONFIG.oneofs_by_name["destination"] +_BATCHTRANSLATETEXTREQUEST_MODELSENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST +_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY.fields_by_name[ + "value" +].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST +_BATCHTRANSLATETEXTREQUEST.fields_by_name[ + "models" +].message_type = _BATCHTRANSLATETEXTREQUEST_MODELSENTRY +_BATCHTRANSLATETEXTREQUEST.fields_by_name["input_configs"].message_type = _INPUTCONFIG +_BATCHTRANSLATETEXTREQUEST.fields_by_name["output_config"].message_type = _OUTPUTCONFIG +_BATCHTRANSLATETEXTREQUEST.fields_by_name[ + "glossaries" +].message_type = _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY +_BATCHTRANSLATETEXTREQUEST.fields_by_name[ + "labels" +].message_type = _BATCHTRANSLATETEXTREQUEST_LABELSENTRY +_BATCHTRANSLATEMETADATA.fields_by_name[ + "state" +].enum_type = _BATCHTRANSLATEMETADATA_STATE +_BATCHTRANSLATEMETADATA.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BATCHTRANSLATEMETADATA_STATE.containing_type = _BATCHTRANSLATEMETADATA +_BATCHTRANSLATERESPONSE.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_BATCHTRANSLATERESPONSE.fields_by_name[ + "end_time" +].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GLOSSARYINPUTCONFIG.fields_by_name["gcs_source"].message_type = _GCSSOURCE +_GLOSSARYINPUTCONFIG.oneofs_by_name["source"].fields.append( + _GLOSSARYINPUTCONFIG.fields_by_name["gcs_source"] +) +_GLOSSARYINPUTCONFIG.fields_by_name[ + "gcs_source" +].containing_oneof = _GLOSSARYINPUTCONFIG.oneofs_by_name["source"] +_GLOSSARY_LANGUAGECODEPAIR.containing_type = _GLOSSARY +_GLOSSARY_LANGUAGECODESSET.containing_type = _GLOSSARY +_GLOSSARY.fields_by_name["language_pair"].message_type = _GLOSSARY_LANGUAGECODEPAIR +_GLOSSARY.fields_by_name["language_codes_set"].message_type = _GLOSSARY_LANGUAGECODESSET +_GLOSSARY.fields_by_name["input_config"].message_type = _GLOSSARYINPUTCONFIG +_GLOSSARY.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GLOSSARY.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GLOSSARY.oneofs_by_name["languages"].fields.append( + _GLOSSARY.fields_by_name["language_pair"] +) +_GLOSSARY.fields_by_name["language_pair"].containing_oneof = _GLOSSARY.oneofs_by_name[ + "languages" +] +_GLOSSARY.oneofs_by_name["languages"].fields.append( + _GLOSSARY.fields_by_name["language_codes_set"] +) +_GLOSSARY.fields_by_name[ + "language_codes_set" +].containing_oneof = _GLOSSARY.oneofs_by_name["languages"] +_CREATEGLOSSARYREQUEST.fields_by_name["glossary"].message_type = _GLOSSARY +_LISTGLOSSARIESRESPONSE.fields_by_name["glossaries"].message_type = _GLOSSARY +_CREATEGLOSSARYMETADATA.fields_by_name[ + "state" +].enum_type = _CREATEGLOSSARYMETADATA_STATE +_CREATEGLOSSARYMETADATA.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_CREATEGLOSSARYMETADATA_STATE.containing_type = _CREATEGLOSSARYMETADATA +_DELETEGLOSSARYMETADATA.fields_by_name[ + "state" +].enum_type = _DELETEGLOSSARYMETADATA_STATE +_DELETEGLOSSARYMETADATA.fields_by_name[ + "submit_time" +].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DELETEGLOSSARYMETADATA_STATE.containing_type = _DELETEGLOSSARYMETADATA +_DELETEGLOSSARYRESPONSE.fields_by_name[ + "submit_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DELETEGLOSSARYRESPONSE.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +DESCRIPTOR.message_types_by_name[ + "TranslateTextGlossaryConfig" +] = _TRANSLATETEXTGLOSSARYCONFIG +DESCRIPTOR.message_types_by_name["TranslateTextRequest"] = _TRANSLATETEXTREQUEST +DESCRIPTOR.message_types_by_name["TranslateTextResponse"] = _TRANSLATETEXTRESPONSE +DESCRIPTOR.message_types_by_name["Translation"] = _TRANSLATION +DESCRIPTOR.message_types_by_name["DetectLanguageRequest"] = _DETECTLANGUAGEREQUEST +DESCRIPTOR.message_types_by_name["DetectedLanguage"] = _DETECTEDLANGUAGE +DESCRIPTOR.message_types_by_name["DetectLanguageResponse"] = _DETECTLANGUAGERESPONSE +DESCRIPTOR.message_types_by_name[ + "GetSupportedLanguagesRequest" +] = _GETSUPPORTEDLANGUAGESREQUEST +DESCRIPTOR.message_types_by_name["SupportedLanguages"] = _SUPPORTEDLANGUAGES +DESCRIPTOR.message_types_by_name["SupportedLanguage"] = _SUPPORTEDLANGUAGE +DESCRIPTOR.message_types_by_name["GcsSource"] = _GCSSOURCE +DESCRIPTOR.message_types_by_name["InputConfig"] = _INPUTCONFIG +DESCRIPTOR.message_types_by_name["GcsDestination"] = _GCSDESTINATION +DESCRIPTOR.message_types_by_name["OutputConfig"] = _OUTPUTCONFIG +DESCRIPTOR.message_types_by_name[ + "BatchTranslateTextRequest" +] = _BATCHTRANSLATETEXTREQUEST +DESCRIPTOR.message_types_by_name["BatchTranslateMetadata"] = _BATCHTRANSLATEMETADATA +DESCRIPTOR.message_types_by_name["BatchTranslateResponse"] = _BATCHTRANSLATERESPONSE +DESCRIPTOR.message_types_by_name["GlossaryInputConfig"] = _GLOSSARYINPUTCONFIG +DESCRIPTOR.message_types_by_name["Glossary"] = _GLOSSARY +DESCRIPTOR.message_types_by_name["CreateGlossaryRequest"] = _CREATEGLOSSARYREQUEST 
+DESCRIPTOR.message_types_by_name["GetGlossaryRequest"] = _GETGLOSSARYREQUEST +DESCRIPTOR.message_types_by_name["DeleteGlossaryRequest"] = _DELETEGLOSSARYREQUEST +DESCRIPTOR.message_types_by_name["ListGlossariesRequest"] = _LISTGLOSSARIESREQUEST +DESCRIPTOR.message_types_by_name["ListGlossariesResponse"] = _LISTGLOSSARIESRESPONSE +DESCRIPTOR.message_types_by_name["CreateGlossaryMetadata"] = _CREATEGLOSSARYMETADATA +DESCRIPTOR.message_types_by_name["DeleteGlossaryMetadata"] = _DELETEGLOSSARYMETADATA +DESCRIPTOR.message_types_by_name["DeleteGlossaryResponse"] = _DELETEGLOSSARYRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +TranslateTextGlossaryConfig = _reflection.GeneratedProtocolMessageType( + "TranslateTextGlossaryConfig", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATETEXTGLOSSARYCONFIG, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Configures which glossary should be used for a specific target language, + and defines options for applying that glossary. + + + Attributes: + glossary: + Required. Specifies the glossary used for this translation. + Use this format: projects/\ */locations/*/glossaries/\* + ignore_case: + Optional. Indicates match is case-insensitive. Default value + is false if missing. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.TranslateTextGlossaryConfig) + ), +) +_sym_db.RegisterMessage(TranslateTextGlossaryConfig) + +TranslateTextRequest = _reflection.GeneratedProtocolMessageType( + "TranslateTextRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATETEXTREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.TranslateTextRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_TRANSLATETEXTREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The request message for synchronous translation. + + + Attributes: + contents: + Required. The content of the input in string format. We + recommend the total content be less than 30k codepoints. Use + BatchTranslateText for larger text. + mime_type: + Optional. The format of the source text, for example, + "text/html", "text/plain". If left blank, the MIME type + defaults to "text/html". + source_language_code: + Optional. The BCP-47 language code of the input text if known, + for example, "en-US" or "sr-Latn". Supported language codes + are listed in Language Support. If the source language isn't + specified, the API attempts to identify the source language + automatically and returns the source language within the + response. + target_language_code: + Required. The BCP-47 language code to use for translation of + the input text, set to one of the language codes listed in + Language Support. + parent: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-number-or-id}`` + or ``projects/{project-number-or-id}/locations/{location- + id}``. For global calls, use ``projects/{project-number-or- + id}/locations/global`` or ``projects/{project-number-or-id}``. 
+ Non-global location is required for requests using AutoML + models or custom glossaries. Models and glossaries must be + within the same region (have same location-id), otherwise an + INVALID\_ARGUMENT (400) error is returned. + model: + Optional. The ``model`` type requested for this translation. + The format depends on model type: - AutoML Translation + models: ``projects/{project-number-or- + id}/locations/{location-id}/models/{model-id}`` - General + (built-in) models: ``projects/{project-number-or- + id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location- + id}/models/general/base`` For global (non-regionalized) + requests, use ``location-id`` ``global``. For example, + ``projects/{project-number-or- + id}/locations/global/models/general/nmt``. If missing, the + system decides which google base model to use. + glossary_config: + Optional. Glossary to be applied. The glossary must be within + the same region (have the same location-id) as the model, + otherwise an INVALID\_ARGUMENT (400) error is returned. + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.TranslateTextRequest) + ), +) +_sym_db.RegisterMessage(TranslateTextRequest) +_sym_db.RegisterMessage(TranslateTextRequest.LabelsEntry) + +TranslateTextResponse = _reflection.GeneratedProtocolMessageType( + "TranslateTextResponse", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATETEXTRESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__=""" + Attributes: + translations: + Text translation responses with no glossary applied. This + field has the same length as [``contents``][google.cloud.trans + lation.v3.TranslateTextRequest.contents]. + glossary_translations: + Text translation responses if a glossary is provided in the + request. This can be the same as [``translations``][google.clo + ud.translation.v3.TranslateTextResponse.translations] if no + terms apply. This field has the same length as [``contents``][ + google.cloud.translation.v3.TranslateTextRequest.contents]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.TranslateTextResponse) + ), +) +_sym_db.RegisterMessage(TranslateTextResponse) + +Translation = _reflection.GeneratedProtocolMessageType( + "Translation", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATION, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""A single translation response. + + + Attributes: + translated_text: + Text translated into the target language. + model: + Only present when ``model`` is present in the request. + ``model`` here is normalized to have project number. For + example: If the ``model`` requested in TranslationTextRequest + is ``projects/{project-id}/locations/{location- + id}/models/general/nmt`` then ``model`` here would be + normalized to ``projects/{project-number}/locations/{location- + id}/models/general/nmt``. 
+ detected_language_code: + The BCP-47 language code of source text in the initial + request, detected automatically, if no source language was + passed within the initial request. If the source language was + passed, auto-detection of the language does not occur and this + field is empty. + glossary_config: + The ``glossary_config`` used for this translation. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.Translation) + ), +) +_sym_db.RegisterMessage(Translation) + +DetectLanguageRequest = _reflection.GeneratedProtocolMessageType( + "DetectLanguageRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_DETECTLANGUAGEREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DetectLanguageRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_DETECTLANGUAGEREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The request message for language detection. + + + Attributes: + parent: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-number-or- + id}/locations/{location-id}`` or ``projects/{project-number- + or-id}``. For global calls, use ``projects/{project-number- + or-id}/locations/global`` or ``projects/{project-number-or- + id}``. Only models within the same region (has same location- + id) can be used. Otherwise an INVALID\_ARGUMENT (400) error is + returned. + model: + Optional. The language detection model to be used. Format: + ``projects/{project-number-or-id}/locations/{location- + id}/models/language-detection/{model-id}`` Only one language + detection model is currently supported: ``projects/{project- + number-or-id}/locations/{location-id}/models/language- + detection/default``. If not specified, the default model is + used. 
+ source: + Required. The source of the document from which to detect the + language. + content: + The content of the input stored as a string. + mime_type: + Optional. The format of the source text, for example, + "text/html", "text/plain". If left blank, the MIME type + defaults to "text/html". + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DetectLanguageRequest) + ), +) +_sym_db.RegisterMessage(DetectLanguageRequest) +_sym_db.RegisterMessage(DetectLanguageRequest.LabelsEntry) + +DetectedLanguage = _reflection.GeneratedProtocolMessageType( + "DetectedLanguage", + (_message.Message,), + dict( + DESCRIPTOR=_DETECTEDLANGUAGE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The response message for language detection. + + + Attributes: + language_code: + The BCP-47 language code of source content in the request, + detected automatically. + confidence: + The confidence of the detection result for this language. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DetectedLanguage) + ), +) +_sym_db.RegisterMessage(DetectedLanguage) + +DetectLanguageResponse = _reflection.GeneratedProtocolMessageType( + "DetectLanguageResponse", + (_message.Message,), + dict( + DESCRIPTOR=_DETECTLANGUAGERESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The response message for language detection. + + + Attributes: + languages: + A list of detected languages sorted by detection confidence in + descending order. 
The most probable language first. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DetectLanguageResponse) + ), +) +_sym_db.RegisterMessage(DetectLanguageResponse) + +GetSupportedLanguagesRequest = _reflection.GeneratedProtocolMessageType( + "GetSupportedLanguagesRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETSUPPORTEDLANGUAGESREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The request message for discovering supported languages. + + + Attributes: + parent: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-number-or-id}`` + or ``projects/{project-number-or-id}/locations/{location- + id}``. For global calls, use ``projects/{project-number-or- + id}/locations/global`` or ``projects/{project-number-or-id}``. + Non-global location is required for AutoML models. Only + models within the same region (have same location-id) can be + used, otherwise an INVALID\_ARGUMENT (400) error is returned. + display_language_code: + Optional. The language to use to return localized, human + readable names of supported languages. If missing, then + display names are not returned in a response. + model: + Optional. Get supported languages of this model. The format + depends on model type: - AutoML Translation models: + ``projects/{project-number-or-id}/locations/{location- + id}/models/{model-id}`` - General (built-in) models: + ``projects/{project-number-or-id}/locations/{location- + id}/models/general/nmt``, ``projects/{project-number-or- + id}/locations/{location-id}/models/general/base`` Returns + languages supported by the specified model. If missing, we get + supported languages of Google general base (PBMT) model. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GetSupportedLanguagesRequest) + ), +) +_sym_db.RegisterMessage(GetSupportedLanguagesRequest) + +SupportedLanguages = _reflection.GeneratedProtocolMessageType( + "SupportedLanguages", + (_message.Message,), + dict( + DESCRIPTOR=_SUPPORTEDLANGUAGES, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The response message for discovering supported languages. + + + Attributes: + languages: + A list of supported language responses. This list contains an + entry for each language the Translation API supports. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.SupportedLanguages) + ), +) +_sym_db.RegisterMessage(SupportedLanguages) + +SupportedLanguage = _reflection.GeneratedProtocolMessageType( + "SupportedLanguage", + (_message.Message,), + dict( + DESCRIPTOR=_SUPPORTEDLANGUAGE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""A single supported language response corresponds to information related + to one supported language. + + + Attributes: + language_code: + Supported language code, generally consisting of its ISO 639-1 + identifier, for example, 'en', 'ja'. In certain cases, BCP-47 + codes including language and region identifiers are returned + (for example, 'zh-TW' and 'zh-CN') + display_name: + Human readable name of the language localized in the display + language specified in the request. + support_source: + Can be used as source language. + support_target: + Can be used as target language. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.SupportedLanguage) + ), +) +_sym_db.RegisterMessage(SupportedLanguage) + +GcsSource = _reflection.GeneratedProtocolMessageType( + "GcsSource", + (_message.Message,), + dict( + DESCRIPTOR=_GCSSOURCE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The Google Cloud Storage location for the input content. + + + Attributes: + input_uri: + Required. Source data URI. For example, + ``gs://my_bucket/my_object``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GcsSource) + ), +) +_sym_db.RegisterMessage(GcsSource) + +InputConfig = _reflection.GeneratedProtocolMessageType( + "InputConfig", + (_message.Message,), + dict( + DESCRIPTOR=_INPUTCONFIG, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Input configuration for BatchTranslateText request. + + + Attributes: + mime_type: + Optional. Can be "text/plain" or "text/html". For ``.tsv``, + "text/html" is used if mime\_type is missing. For ``.html``, + this field must be "text/html" or empty. For ``.txt``, this + field must be "text/plain" or empty. + source: + Required. Specify the input. + gcs_source: + Required. Google Cloud Storage location for the source input. + This can be a single file (for example, ``gs://translation- + test/input.tsv``) or a wildcard (for example, + ``gs://translation-test/*``). If a file extension is ``.tsv``, + it can contain either one or two columns. The first column + (optional) is the id of the text request. If the first column + is missing, we use the row number (0-based) from the input + file as the ID in the output file. The second column is the + actual text to be translated. We recommend each row be <= 10K + Unicode codepoints, otherwise an error might be returned. Note + that the input tsv must be RFC 4180 compliant. 
You could use + https://github.com/Clever/csvlint to check potential + formatting errors in your tsv file. csvlint --delimiter=':raw- + latex:`\t`' your\_input\_file.tsv The other supported file + extensions are ``.txt`` or ``.html``, which is treated as a + single large chunk of text. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.InputConfig) + ), +) +_sym_db.RegisterMessage(InputConfig) + +GcsDestination = _reflection.GeneratedProtocolMessageType( + "GcsDestination", + (_message.Message,), + dict( + DESCRIPTOR=_GCSDESTINATION, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The Google Cloud Storage location for the output content. + + + Attributes: + output_uri_prefix: + Required. There must be no files under 'output\_uri\_prefix'. + 'output\_uri\_prefix' must end with "/" and start with + "gs://", otherwise an INVALID\_ARGUMENT (400) error is + returned. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GcsDestination) + ), +) +_sym_db.RegisterMessage(GcsDestination) + +OutputConfig = _reflection.GeneratedProtocolMessageType( + "OutputConfig", + (_message.Message,), + dict( + DESCRIPTOR=_OUTPUTCONFIG, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Output configuration for BatchTranslateText request. + + + Attributes: + destination: + Required. The destination of output. + gcs_destination: + Google Cloud Storage destination for output content. For every + single input file (for example, gs://a/b/c.[extension]), we + generate at most 2 \* n output files. (n is the # of + target\_language\_codes in the BatchTranslateTextRequest). + Output files (tsv) generated are compliant with RFC 4180 + except that record delimiters are ``\\\\n`` instead of + ``\\\\r\\\\n``. We don't provide any way to + change record delimiters. 
While the input files are being + processed, we write/update an index file 'index.csv' under + 'output\_uri\_prefix' (for example, gs://translation- + test/index.csv) The index file is generated/updated as new + files are being translated. The format is: input\_file,target + \_language\_code,translations\_file,errors\_file, + glossary\_translations\_file,glossary\_errors\_file + input\_file is one file we matched using + gcs\_source.input\_uri. target\_language\_code is provided in + the request. translations\_file contains the translations. + (details provided below) errors\_file contains the errors + during processing of the file. (details below). Both + translations\_file and errors\_file could be empty strings if + we have no content to output. glossary\_translations\_file and + glossary\_errors\_file are always empty strings if the + input\_file is tsv. They could also be empty if we have no + content to output. Once a row is present in index.csv, the + input/output matching never changes. Callers should also + expect all the content in input\_file are processed and ready + to be consumed (that is, no partial output file is written). + The format of translations\_file (for target language code + 'trg') is: gs://translation\_test/a\_b\_c\_'trg'\_translations + .[extension] If the input file extension is tsv, the output + has the following columns: Column 1: ID of the request + provided in the input, if it's not provided in the input, then + the input row number is used (0-based). Column 2: source + sentence. Column 3: translation without applying a glossary. + Empty string if there is an error. Column 4 (only present if a + glossary is provided in the request): translation after + applying the glossary. Empty string if there is an error + applying the glossary. Could be same string as column 3 if + there is no glossary applied. If input file extension is a + txt or html, the translation is directly written to the output + file. 
If glossary is requested, a separate + glossary\_translations\_file has format of gs://translation\_t + est/a\_b\_c\_'trg'\_glossary\_translations.[extension] The + format of errors file (for target language code 'trg') is: + gs://translation\_test/a\_b\_c\_'trg'\_errors.[extension] If + the input file extension is tsv, errors\_file contains the + following: Column 1: ID of the request provided in the input, + if it's not provided in the input, then the input row number + is used (0-based). Column 2: source sentence. Column 3: Error + detail for the translation. Could be empty. Column 4 (only + present if a glossary is provided in the request): Error when + applying the glossary. If the input file extension is txt or + html, glossary\_error\_file will be generated that contains + error details. glossary\_error\_file has format of gs://transl + ation\_test/a\_b\_c\_'trg'\_glossary\_errors.[extension] + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.OutputConfig) + ), +) +_sym_db.RegisterMessage(OutputConfig) + +BatchTranslateTextRequest = _reflection.GeneratedProtocolMessageType( + "BatchTranslateTextRequest", + (_message.Message,), + dict( + ModelsEntry=_reflection.GeneratedProtocolMessageType( + "ModelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST_MODELSENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateTextRequest.ModelsEntry) + ), + ), + GlossariesEntry=_reflection.GeneratedProtocolMessageType( + "GlossariesEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateTextRequest.GlossariesEntry) + ), + ), + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + 
dict( + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateTextRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""The batch translation request. + + + Attributes: + parent: + Required. Location to make a call. Must refer to a caller's + project. Format: ``projects/{project-number-or- + id}/locations/{location-id}``. The ``global`` location is not + supported for batch translation. Only AutoML Translation + models or glossaries within the same region (have the same + location-id) can be used, otherwise an INVALID\_ARGUMENT (400) + error is returned. + source_language_code: + Required. Source language code. + target_language_codes: + Required. Specify up to 10 language codes here. + models: + Optional. The models to use for translation. Map's key is + target language code. Map's value is model name. Value can be + a built-in general model, or an AutoML Translation model. The + value format depends on model type: - AutoML Translation + models: ``projects/{project-number-or- + id}/locations/{location-id}/models/{model-id}`` - General + (built-in) models: ``projects/{project-number-or- + id}/locations/{location-id}/models/general/nmt``, + ``projects/{project-number-or-id}/locations/{location- + id}/models/general/base`` If the map is empty or a specific + model is not requested for a language pair, then default + google model (nmt) is used. + input_configs: + Required. Input configurations. The total number of files + matched should be <= 1000. The total content size should be <= + 100M Unicode codepoints. The files must use UTF-8 encoding. + output_config: + Required. Output configuration. If 2 input configs match to + the same file (that is, same input path), we don't generate + output for duplicate inputs. 
+ glossaries: + Optional. Glossaries to be applied for translation. It's keyed + by target language code. + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateTextRequest) + ), +) +_sym_db.RegisterMessage(BatchTranslateTextRequest) +_sym_db.RegisterMessage(BatchTranslateTextRequest.ModelsEntry) +_sym_db.RegisterMessage(BatchTranslateTextRequest.GlossariesEntry) +_sym_db.RegisterMessage(BatchTranslateTextRequest.LabelsEntry) + +BatchTranslateMetadata = _reflection.GeneratedProtocolMessageType( + "BatchTranslateMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATEMETADATA, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""State metadata for the batch translation operation. + + + Attributes: + state: + The state of the operation. + translated_characters: + Number of successfully translated characters so far (Unicode + codepoints). + failed_characters: + Number of characters that have failed to process so far + (Unicode codepoints). + total_characters: + Total number of characters (Unicode codepoints). This is the + total number of codepoints from input files times the number + of target languages and appears here shortly after the call is + submitted. + submit_time: + Time when the operation was submitted. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateMetadata) + ), +) +_sym_db.RegisterMessage(BatchTranslateMetadata) + +BatchTranslateResponse = _reflection.GeneratedProtocolMessageType( + "BatchTranslateResponse", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATERESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Stored in the + [google.longrunning.Operation.response][google.longrunning.Operation.response] + field returned by BatchTranslateText if at least one sentence is + translated successfully. + + + Attributes: + total_characters: + Total number of characters (Unicode codepoints). + translated_characters: + Number of successfully translated characters (Unicode + codepoints). + failed_characters: + Number of characters that have failed to process (Unicode + codepoints). + submit_time: + Time when the operation was submitted. + end_time: + The time when the operation is finished and [google.longrunnin + g.Operation.done][google.longrunning.Operation.done] is set to + true. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.BatchTranslateResponse) + ), +) +_sym_db.RegisterMessage(BatchTranslateResponse) + +GlossaryInputConfig = _reflection.GeneratedProtocolMessageType( + "GlossaryInputConfig", + (_message.Message,), + dict( + DESCRIPTOR=_GLOSSARYINPUTCONFIG, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Input configuration for glossaries. + + + Attributes: + source: + Required. Specify the input. + gcs_source: + Required. Google Cloud Storage location of glossary data. File + format is determined based on the filename extension. API + returns [google.rpc.Code.INVALID\_ARGUMENT] for unsupported + URI-s and file formats. Wildcards are not allowed. 
This must + be a single file in one of the following formats: For + unidirectional glossaries: - TSV/CSV (``.tsv``/``.csv``): 2 + column file, tab- or comma-separated. The first column is + source text. The second column is target text. The file + must not contain headers. That is, the first row is data, + not column names. - TMX (``.tmx``): TMX file with parallel + data defining source/target term pairs. For equivalent + term sets glossaries: - CSV (``.csv``): Multi-column CSV + file defining equivalent glossary terms in multiple + languages. The format is defined for Google Translation + Toolkit and documented in `Use a glossary `__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GlossaryInputConfig) + ), +) +_sym_db.RegisterMessage(GlossaryInputConfig) + +Glossary = _reflection.GeneratedProtocolMessageType( + "Glossary", + (_message.Message,), + dict( + LanguageCodePair=_reflection.GeneratedProtocolMessageType( + "LanguageCodePair", + (_message.Message,), + dict( + DESCRIPTOR=_GLOSSARY_LANGUAGECODEPAIR, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Used with unidirectional glossaries. + + + Attributes: + source_language_code: + Required. The BCP-47 language code of the input text, for + example, "en-US". Expected to be an exact match for + GlossaryTerm.language\_code. + target_language_code: + Required. The BCP-47 language code for translation output, for + example, "zh-CN". Expected to be an exact match for + GlossaryTerm.language\_code. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.Glossary.LanguageCodePair) + ), + ), + LanguageCodesSet=_reflection.GeneratedProtocolMessageType( + "LanguageCodesSet", + (_message.Message,), + dict( + DESCRIPTOR=_GLOSSARY_LANGUAGECODESSET, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Used with equivalent term set glossaries. 
+ + + Attributes: + language_codes: + The BCP-47 language code(s) for terms defined in the glossary. + All entries are unique. The list contains at least two + entries. Expected to be an exact match for + GlossaryTerm.language\_code. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.Glossary.LanguageCodesSet) + ), + ), + DESCRIPTOR=_GLOSSARY, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Represents a glossary built from user provided data. + + + Attributes: + name: + Required. The resource name of the glossary. Glossary names + have the form ``projects/{project-number-or- + id}/locations/{location-id}/glossaries/{glossary-id}``. + languages: + Languages supported by the glossary. + language_pair: + Used with unidirectional glossaries. + language_codes_set: + Used with equivalent term set glossaries. + input_config: + Required. Provides examples to build the glossary from. Total + glossary must not exceed 10M Unicode codepoints. + entry_count: + Output only. The number of entries defined in the glossary. + submit_time: + Output only. When CreateGlossary was called. + end_time: + Output only. When the glossary creation was finished. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.Glossary) + ), +) +_sym_db.RegisterMessage(Glossary) +_sym_db.RegisterMessage(Glossary.LanguageCodePair) +_sym_db.RegisterMessage(Glossary.LanguageCodesSet) + +CreateGlossaryRequest = _reflection.GeneratedProtocolMessageType( + "CreateGlossaryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEGLOSSARYREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Request message for CreateGlossary. + + + Attributes: + parent: + Required. The project name. + glossary: + Required. The glossary to create. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.CreateGlossaryRequest) + ), +) +_sym_db.RegisterMessage(CreateGlossaryRequest) + +GetGlossaryRequest = _reflection.GeneratedProtocolMessageType( + "GetGlossaryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETGLOSSARYREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Request message for GetGlossary. + + + Attributes: + name: + Required. The name of the glossary to retrieve. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.GetGlossaryRequest) + ), +) +_sym_db.RegisterMessage(GetGlossaryRequest) + +DeleteGlossaryRequest = _reflection.GeneratedProtocolMessageType( + "DeleteGlossaryRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEGLOSSARYREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Request message for DeleteGlossary. + + + Attributes: + name: + Required. The name of the glossary to delete. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DeleteGlossaryRequest) + ), +) +_sym_db.RegisterMessage(DeleteGlossaryRequest) + +ListGlossariesRequest = _reflection.GeneratedProtocolMessageType( + "ListGlossariesRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTGLOSSARIESREQUEST, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Request message for ListGlossaries. + + + Attributes: + parent: + Required. The name of the project from which to list all of + the glossaries. + page_size: + Optional. Requested page size. The server may return fewer + glossaries than requested. If unspecified, the server picks an + appropriate default. + page_token: + Optional. A token identifying a page of results the server + should return. Typically, this is the value of + [ListGlossariesResponse.next\_page\_token] returned from the + previous call to ``ListGlossaries`` method. 
The first page is + returned if ``page_token``\ is empty or missing. + filter: + Optional. Filter specifying constraints of a list operation. + Filtering is not supported yet, and the parameter currently + has no effect. If missing, no filtering is performed. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.ListGlossariesRequest) + ), +) +_sym_db.RegisterMessage(ListGlossariesRequest) + +ListGlossariesResponse = _reflection.GeneratedProtocolMessageType( + "ListGlossariesResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTGLOSSARIESRESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Response message for ListGlossaries. + + + Attributes: + glossaries: + The list of glossaries for a project. + next_page_token: + A token to retrieve a page of results. Pass this value in the + [ListGlossariesRequest.page\_token] field in the subsequent + call to ``ListGlossaries`` method to retrieve the next page of + results. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.ListGlossariesResponse) + ), +) +_sym_db.RegisterMessage(ListGlossariesResponse) + +CreateGlossaryMetadata = _reflection.GeneratedProtocolMessageType( + "CreateGlossaryMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_CREATEGLOSSARYMETADATA, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Stored in the + [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] + field returned by CreateGlossary. + + + Attributes: + name: + The name of the glossary that is being created. + state: + The current state of the glossary creation operation. + submit_time: + The time when the operation was submitted to the server. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.CreateGlossaryMetadata) + ), +) +_sym_db.RegisterMessage(CreateGlossaryMetadata) + +DeleteGlossaryMetadata = _reflection.GeneratedProtocolMessageType( + "DeleteGlossaryMetadata", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEGLOSSARYMETADATA, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Stored in the + [google.longrunning.Operation.metadata][google.longrunning.Operation.metadata] + field returned by DeleteGlossary. + + + Attributes: + name: + The name of the glossary that is being deleted. + state: + The current state of the glossary deletion operation. + submit_time: + The time when the operation was submitted to the server. + """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DeleteGlossaryMetadata) + ), +) +_sym_db.RegisterMessage(DeleteGlossaryMetadata) + +DeleteGlossaryResponse = _reflection.GeneratedProtocolMessageType( + "DeleteGlossaryResponse", + (_message.Message,), + dict( + DESCRIPTOR=_DELETEGLOSSARYRESPONSE, + __module__="google.cloud.translation_v3.proto.translation_service_pb2", + __doc__="""Stored in the + [google.longrunning.Operation.response][google.longrunning.Operation.response] + field returned by DeleteGlossary. + + + Attributes: + name: + The name of the deleted glossary. + submit_time: + The time when the operation was submitted to the server. + end_time: + The time when the glossary deletion is finished and [google.lo + ngrunning.Operation.done][google.longrunning.Operation.done] + is set to true. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3.DeleteGlossaryResponse) + ), +) +_sym_db.RegisterMessage(DeleteGlossaryResponse) + + +DESCRIPTOR._options = None +_TRANSLATETEXTGLOSSARYCONFIG.fields_by_name["glossary"]._options = None +_TRANSLATETEXTGLOSSARYCONFIG.fields_by_name["ignore_case"]._options = None +_TRANSLATETEXTREQUEST_LABELSENTRY._options = None +_TRANSLATETEXTREQUEST.fields_by_name["contents"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["mime_type"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["source_language_code"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["target_language_code"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["parent"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["model"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["glossary_config"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["labels"]._options = None +_DETECTLANGUAGEREQUEST_LABELSENTRY._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["parent"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["model"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["mime_type"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["labels"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["parent"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["display_language_code"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["model"]._options = None +_INPUTCONFIG.fields_by_name["mime_type"]._options = None +_BATCHTRANSLATETEXTREQUEST_MODELSENTRY._options = None +_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY._options = None +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["parent"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["source_language_code"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["target_language_codes"]._options = None 
+_BATCHTRANSLATETEXTREQUEST.fields_by_name["models"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["input_configs"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["output_config"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["glossaries"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["labels"]._options = None +_GLOSSARY.fields_by_name["entry_count"]._options = None +_GLOSSARY.fields_by_name["submit_time"]._options = None +_GLOSSARY.fields_by_name["end_time"]._options = None +_GLOSSARY._options = None +_CREATEGLOSSARYREQUEST.fields_by_name["parent"]._options = None +_CREATEGLOSSARYREQUEST.fields_by_name["glossary"]._options = None +_GETGLOSSARYREQUEST.fields_by_name["name"]._options = None +_DELETEGLOSSARYREQUEST.fields_by_name["name"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["parent"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["page_size"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["page_token"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["filter"]._options = None + +_TRANSLATIONSERVICE = _descriptor.ServiceDescriptor( + name="TranslationService", + full_name="google.cloud.translation.v3.TranslationService", + file=DESCRIPTOR, + index=0, + serialized_options=_b( + "\312A\030translate.googleapis.com\322A`https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-translation" + ), + serialized_start=5516, + serialized_end=7578, + methods=[ + _descriptor.MethodDescriptor( + name="TranslateText", + full_name="google.cloud.translation.v3.TranslationService.TranslateText", + index=0, + containing_service=None, + input_type=_TRANSLATETEXTREQUEST, + output_type=_TRANSLATETEXTRESPONSE, + serialized_options=_b( + 
'\202\323\344\223\002b"1/v3/{parent=projects/*/locations/*}:translateText:\001*Z*"%/v3/{parent=projects/*}:translateText:\001*\332A$parent,target_language_code,contents\332AIparent,model,mime_type,source_language_code,target_language_code,contents' + ), + ), + _descriptor.MethodDescriptor( + name="DetectLanguage", + full_name="google.cloud.translation.v3.TranslationService.DetectLanguage", + index=1, + containing_service=None, + input_type=_DETECTLANGUAGEREQUEST, + output_type=_DETECTLANGUAGERESPONSE, + serialized_options=_b( + '\202\323\344\223\002d"2/v3/{parent=projects/*/locations/*}:detectLanguage:\001*Z+"&/v3/{parent=projects/*}:detectLanguage:\001*\332A\036parent,model,mime_type,content' + ), + ), + _descriptor.MethodDescriptor( + name="GetSupportedLanguages", + full_name="google.cloud.translation.v3.TranslationService.GetSupportedLanguages", + index=2, + containing_service=None, + input_type=_GETSUPPORTEDLANGUAGESREQUEST, + output_type=_SUPPORTEDLANGUAGES, + serialized_options=_b( + '\202\323\344\223\002f\0226/v3/{parent=projects/*/locations/*}/supportedLanguagesZ,\022*/v3/{parent=projects/*}/supportedLanguages\332A"parent,model,display_language_code' + ), + ), + _descriptor.MethodDescriptor( + name="BatchTranslateText", + full_name="google.cloud.translation.v3.TranslationService.BatchTranslateText", + index=3, + containing_service=None, + input_type=_BATCHTRANSLATETEXTREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + '\202\323\344\223\002;"6/v3/{parent=projects/*/locations/*}:batchTranslateText:\001*\312A0\n\026BatchTranslateResponse\022\026BatchTranslateMetadata' + ), + ), + _descriptor.MethodDescriptor( + name="CreateGlossary", + full_name="google.cloud.translation.v3.TranslationService.CreateGlossary", + index=4, + containing_service=None, + input_type=_CREATEGLOSSARYREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + 
'\202\323\344\223\002:"./v3/{parent=projects/*/locations/*}/glossaries:\010glossary\332A\017parent,glossary\312A"\n\010Glossary\022\026CreateGlossaryMetadata' + ), + ), + _descriptor.MethodDescriptor( + name="ListGlossaries", + full_name="google.cloud.translation.v3.TranslationService.ListGlossaries", + index=5, + containing_service=None, + input_type=_LISTGLOSSARIESREQUEST, + output_type=_LISTGLOSSARIESRESPONSE, + serialized_options=_b( + "\202\323\344\223\0020\022./v3/{parent=projects/*/locations/*}/glossaries\332A\006parent" + ), + ), + _descriptor.MethodDescriptor( + name="GetGlossary", + full_name="google.cloud.translation.v3.TranslationService.GetGlossary", + index=6, + containing_service=None, + input_type=_GETGLOSSARYREQUEST, + output_type=_GLOSSARY, + serialized_options=_b( + "\202\323\344\223\0020\022./v3/{name=projects/*/locations/*/glossaries/*}\332A\004name" + ), + ), + _descriptor.MethodDescriptor( + name="DeleteGlossary", + full_name="google.cloud.translation.v3.TranslationService.DeleteGlossary", + index=7, + containing_service=None, + input_type=_DELETEGLOSSARYREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + serialized_options=_b( + "\202\323\344\223\0020*./v3/{name=projects/*/locations/*/glossaries/*}\332A\004name\312A0\n\026DeleteGlossaryResponse\022\026DeleteGlossaryMetadata" + ), + ), + ], +) +_sym_db.RegisterServiceDescriptor(_TRANSLATIONSERVICE) + +DESCRIPTOR.services_by_name["TranslationService"] = _TRANSLATIONSERVICE + +# @@protoc_insertion_point(module_scope) diff --git a/translate/google/cloud/translate_v3/proto/translation_service_pb2_grpc.py b/translate/google/cloud/translate_v3/proto/translation_service_pb2_grpc.py new file mode 100644 index 000000000000..5f7f9813ffbe --- /dev/null +++ b/translate/google/cloud/translate_v3/proto/translation_service_pb2_grpc.py @@ -0,0 +1,186 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+import grpc + +from google.cloud.translate_v3.proto import ( + translation_service_pb2 as google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2, +) +from google.longrunning import ( + operations_pb2 as google_dot_longrunning_dot_operations__pb2, +) + + +class TranslationServiceStub(object): + """Proto file for the Cloud Translation API (v3 GA). + + Provides natural language translation operations. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.TranslateText = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/TranslateText", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.TranslateTextRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.TranslateTextResponse.FromString, + ) + self.DetectLanguage = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DetectLanguage", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DetectLanguageRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DetectLanguageResponse.FromString, + ) + self.GetSupportedLanguages = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetSupportedLanguages", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.GetSupportedLanguagesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.SupportedLanguages.FromString, + ) + self.BatchTranslateText = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/BatchTranslateText", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.BatchTranslateTextRequest.SerializeToString, + 
response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.CreateGlossary = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/CreateGlossary", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.CreateGlossaryRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + self.ListGlossaries = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/ListGlossaries", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.ListGlossariesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.ListGlossariesResponse.FromString, + ) + self.GetGlossary = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/GetGlossary", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.GetGlossaryRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.Glossary.FromString, + ) + self.DeleteGlossary = channel.unary_unary( + "/google.cloud.translation.v3.TranslationService/DeleteGlossary", + request_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DeleteGlossaryRequest.SerializeToString, + response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, + ) + + +class TranslationServiceServicer(object): + """Proto file for the Cloud Translation API (v3 GA). + + Provides natural language translation operations. + """ + + def TranslateText(self, request, context): + """Translates input text and returns translated text. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DetectLanguage(self, request, context): + """Detects the language of text within a request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetSupportedLanguages(self, request, context): + """Returns a list of supported languages for translation. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def BatchTranslateText(self, request, context): + """Translates a large volume of text in asynchronous batch mode. + This function provides real-time output as the inputs are being processed. + If caller cancels a request, the partial results (for an input file, it's + all or nothing) may still be available on the specified output location. + + This call returns immediately and you can + use google.longrunning.Operation.name to poll the status of the call. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def CreateGlossary(self, request, context): + """Creates a glossary and returns the long-running operation. Returns + NOT_FOUND, if the project doesn't exist. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ListGlossaries(self, request, context): + """Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't + exist. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetGlossary(self, request, context): + """Gets a glossary. Returns NOT_FOUND, if the glossary doesn't + exist. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def DeleteGlossary(self, request, context): + """Deletes a glossary, or cancels glossary construction + if the glossary isn't created yet. + Returns NOT_FOUND, if the glossary doesn't exist. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_TranslationServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "TranslateText": grpc.unary_unary_rpc_method_handler( + servicer.TranslateText, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.TranslateTextRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.TranslateTextResponse.SerializeToString, + ), + "DetectLanguage": grpc.unary_unary_rpc_method_handler( + servicer.DetectLanguage, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DetectLanguageRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DetectLanguageResponse.SerializeToString, + ), + "GetSupportedLanguages": grpc.unary_unary_rpc_method_handler( + servicer.GetSupportedLanguages, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.GetSupportedLanguagesRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.SupportedLanguages.SerializeToString, + ), + 
"BatchTranslateText": grpc.unary_unary_rpc_method_handler( + servicer.BatchTranslateText, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.BatchTranslateTextRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "CreateGlossary": grpc.unary_unary_rpc_method_handler( + servicer.CreateGlossary, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.CreateGlossaryRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + "ListGlossaries": grpc.unary_unary_rpc_method_handler( + servicer.ListGlossaries, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.ListGlossariesRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.ListGlossariesResponse.SerializeToString, + ), + "GetGlossary": grpc.unary_unary_rpc_method_handler( + servicer.GetGlossary, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.GetGlossaryRequest.FromString, + response_serializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.Glossary.SerializeToString, + ), + "DeleteGlossary": grpc.unary_unary_rpc_method_handler( + servicer.DeleteGlossary, + request_deserializer=google_dot_cloud_dot_translation__v3_dot_proto_dot_translation__service__pb2.DeleteGlossaryRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.cloud.translation.v3.TranslationService", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/types.py b/translate/google/cloud/translate_v3/types.py similarity 
index 87% rename from videointelligence/google/cloud/videointelligence_v1beta1/types.py rename to translate/google/cloud/translate_v3/types.py index f4b0fd1b0bde..14c494adc850 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/types.py +++ b/translate/google/cloud/translate_v3/types.py @@ -20,7 +20,7 @@ from google.api_core.protobuf_helpers import get_messages -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2 +from google.cloud.translate_v3.proto import translation_service_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import timestamp_pb2 @@ -29,7 +29,7 @@ _shared_modules = [operations_pb2, any_pb2, timestamp_pb2, status_pb2] -_local_modules = [video_intelligence_pb2] +_local_modules = [translation_service_pb2] names = [] @@ -39,7 +39,7 @@ names.append(name) for module in _local_modules: for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.videointelligence_v1beta1.types" + message.__module__ = "google.cloud.translate_v3.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/translate/google/cloud/translate_v3beta1/gapic/translation_service_client.py b/translate/google/cloud/translate_v3beta1/gapic/translation_service_client.py index b0feb083369d..c62bacfd859b 100644 --- a/translate/google/cloud/translate_v3beta1/gapic/translation_service_client.py +++ b/translate/google/cloud/translate_v3beta1/gapic/translation_service_client.py @@ -214,11 +214,12 @@ def translate_text( self, contents, target_language_code, + parent, mime_type=None, source_language_code=None, - parent=None, model=None, glossary_config=None, + labels=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -236,8 +237,9 @@ def translate_text( >>> >>> # TODO: Initialize `target_language_code`: >>> target_language_code = '' + >>> parent = 
client.location_path('[PROJECT]', '[LOCATION]') >>> - >>> response = client.translate_text(contents, target_language_code) + >>> response = client.translate_text(contents, target_language_code, parent) Args: contents (list[str]): Required. The content of the input in string format. @@ -245,6 +247,20 @@ def translate_text( Use BatchTranslateText for larger text. target_language_code (str): Required. The BCP-47 language code to use for translation of the input text, set to one of the language codes listed in Language Support. + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. + + Format: ``projects/{project-id}`` or + ``projects/{project-id}/locations/{location-id}``. + + For global calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. + + Non-global location is required for requests using AutoML models or + custom glossaries. + + Models and glossaries must be within the same region (have same + location-id), otherwise an INVALID\_ARGUMENT (400) error is returned. mime_type (str): Optional. The format of the source text, for example, "text/html", "text/plain". If left blank, the MIME type defaults to "text/html". source_language_code (str): Optional. The BCP-47 language code of the input text if @@ -252,14 +268,6 @@ def translate_text( listed in Language Support. If the source language isn't specified, the API attempts to identify the source language automatically and returns the source language within the response. - parent (str): Required. Location to make a regional or global call. - - Format: ``projects/{project-id}/locations/{location-id}``. - - For global calls, use ``projects/{project-id}/locations/global``. - - Models and glossaries must be within the same region (have same - location-id), otherwise an INVALID\_ARGUMENT (400) error is returned. model (str): Optional. The ``model`` type requested for this translation. 
The format depends on model type: @@ -282,6 +290,14 @@ def translate_text( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig` + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -315,11 +331,12 @@ def translate_text( request = translation_service_pb2.TranslateTextRequest( contents=contents, target_language_code=target_language_code, + parent=parent, mime_type=mime_type, source_language_code=source_language_code, - parent=parent, model=model, glossary_config=glossary_config, + labels=labels, ) if metadata is None: metadata = [] @@ -340,10 +357,11 @@ def translate_text( def detect_language( self, - parent=None, + parent, model=None, content=None, mime_type=None, + labels=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -356,14 +374,19 @@ def detect_language( >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> - >>> response = client.detect_language() + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.detect_language(parent) Args: - parent (str): Required. Location to make a regional or global call. + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. - Format: ``projects/{project-id}/locations/{location-id}``. 
+ Format: ``projects/{project-id}/locations/{location-id}`` or + ``projects/{project-id}``. - For global calls, use ``projects/{project-id}/locations/global``. + For global calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. Only models within the same region (has same location-id) can be used. Otherwise an INVALID\_ARGUMENT (400) error is returned. @@ -379,6 +402,14 @@ def detect_language( content (str): The content of the input stored as a string. mime_type (str): Optional. The format of the source text, for example, "text/html", "text/plain". If left blank, the MIME type defaults to "text/html". + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -414,7 +445,11 @@ def detect_language( google.api_core.protobuf_helpers.check_oneof(content=content) request = translation_service_pb2.DetectLanguageRequest( - parent=parent, model=model, content=content, mime_type=mime_type + parent=parent, + model=model, + content=content, + mime_type=mime_type, + labels=labels, ) if metadata is None: metadata = [] @@ -435,7 +470,7 @@ def detect_language( def get_supported_languages( self, - parent=None, + parent, display_language_code=None, model=None, retry=google.api_core.gapic_v1.method.DEFAULT, @@ -450,14 +485,21 @@ def get_supported_languages( >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> - >>> response = client.get_supported_languages() + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> response = client.get_supported_languages(parent) Args: - parent (str): Required. Location to make a regional or global call. + parent (str): Required. Project or location to make a call. Must refer to a caller's + project. - Format: ``projects/{project-id}/locations/{location-id}``. + Format: ``projects/{project-id}`` or + ``projects/{project-id}/locations/{location-id}``. - For global calls, use ``projects/{project-id}/locations/global``. + For global calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. + + Non-global location is required for AutoML models. Only models within the same region (have same location-id) can be used, otherwise an INVALID\_ARGUMENT (400) error is returned. 
@@ -529,13 +571,14 @@ def get_supported_languages( def batch_translate_text( self, + parent, source_language_code, target_language_codes, input_configs, output_config, - parent=None, models=None, glossaries=None, + labels=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -554,6 +597,8 @@ def batch_translate_text( >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> >>> # TODO: Initialize `source_language_code`: >>> source_language_code = '' >>> @@ -566,7 +611,7 @@ def batch_translate_text( >>> # TODO: Initialize `output_config`: >>> output_config = {} >>> - >>> response = client.batch_translate_text(source_language_code, target_language_codes, input_configs, output_config) + >>> response = client.batch_translate_text(parent, source_language_code, target_language_codes, input_configs, output_config) >>> >>> def callback(operation_future): ... # Handle result. @@ -578,6 +623,15 @@ def batch_translate_text( >>> metadata = response.metadata() Args: + parent (str): Required. Location to make a call. Must refer to a caller's project. + + Format: ``projects/{project-id}/locations/{location-id}``. + + The ``global`` location is not supported for batch translation. + + Only AutoML Translation models or glossaries within the same region + (have the same location-id) can be used, otherwise an INVALID\_ARGUMENT + (400) error is returned. source_language_code (str): Required. Source language code. target_language_codes (list[str]): Required. Specify up to 10 language codes here. input_configs (list[Union[dict, ~google.cloud.translate_v3beta1.types.InputConfig]]): Required. Input configurations. @@ -593,15 +647,6 @@ def batch_translate_text( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.translate_v3beta1.types.OutputConfig` - parent (str): Required. 
Location to make a regional call. - - Format: ``projects/{project-id}/locations/{location-id}``. - - The ``global`` location is not supported for batch translation. - - Only AutoML Translation models or glossaries within the same region - (have the same location-id) can be used, otherwise an INVALID\_ARGUMENT - (400) error is returned. models (dict[str -> str]): Optional. The models to use for translation. Map's key is target language code. Map's value is model name. Value can be a built-in general model, or an AutoML Translation model. @@ -622,6 +667,14 @@ def batch_translate_text( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.translate_v3beta1.types.TranslateTextGlossaryConfig` + labels (dict[str -> str]): Optional. The labels with user-defined metadata for the request. + + Label keys and values can be no longer than 63 characters + (Unicode codepoints), can only contain lowercase letters, numeric + characters, underscores and dashes. International characters are allowed. + Label values are optional. Label keys must start with a letter. + + See https://cloud.google.com/translate/docs/labels for more information. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -653,13 +706,14 @@ def batch_translate_text( ) request = translation_service_pb2.BatchTranslateTextRequest( + parent=parent, source_language_code=source_language_code, target_language_codes=target_language_codes, input_configs=input_configs, output_config=output_config, - parent=parent, models=models, glossaries=glossaries, + labels=labels, ) if metadata is None: metadata = [] @@ -781,7 +835,7 @@ def create_glossary( def list_glossaries( self, - parent=None, + parent, page_size=None, filter_=None, retry=google.api_core.gapic_v1.method.DEFAULT, @@ -797,8 +851,10 @@ def list_glossaries( >>> >>> client = translate_v3beta1.TranslationServiceClient() >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> >>> # Iterate over all results - >>> for element in client.list_glossaries(): + >>> for element in client.list_glossaries(parent): ... # process element ... pass >>> @@ -806,7 +862,7 @@ def list_glossaries( >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_glossaries().pages: + >>> for page in client.list_glossaries(parent).pages: ... for element in page: ... # process element ... 
pass diff --git a/translate/google/cloud/translate_v3beta1/gapic/translation_service_client_config.py b/translate/google/cloud/translate_v3beta1/gapic/translation_service_client_config.py index 21b7872180e8..af45b10d95d6 100644 --- a/translate/google/cloud/translate_v3beta1/gapic/translation_service_client_config.py +++ b/translate/google/cloud/translate_v3beta1/gapic/translation_service_client_config.py @@ -54,7 +54,7 @@ }, "DeleteGlossary": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, }, diff --git a/translate/google/cloud/translate_v3beta1/proto/translation_service.proto b/translate/google/cloud/translate_v3beta1/proto/translation_service.proto index fa20f01693f2..e62a50883822 100644 --- a/translate/google/cloud/translate_v3beta1/proto/translation_service.proto +++ b/translate/google/cloud/translate_v3beta1/proto/translation_service.proto @@ -18,10 +18,11 @@ syntax = "proto3"; package google.cloud.translation.v3beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/api/resource.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/timestamp.proto"; -import "google/api/client.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.Translate.V3Beta1"; @@ -36,7 +37,10 @@ option ruby_package = "Google::Cloud::Translate::V3beta1"; // Provides natural language translation operations. service TranslationService { - option (google.api.default_host) = "translation.googleapis.com"; + option (google.api.default_host) = "translate.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/cloud-translation"; // Translates input text and returns translated text. 
rpc TranslateText(TranslateTextRequest) returns (TranslateTextResponse) { @@ -60,6 +64,7 @@ service TranslationService { body: "*" } }; + option (google.api.method_signature) = "parent,model,mime_type"; } // Returns a list of supported languages for translation. @@ -70,6 +75,7 @@ service TranslationService { get: "/v3beta1/{parent=projects/*}/supportedLanguages" } }; + option (google.api.method_signature) = "parent,display_language_code,model"; } // Translates a large volume of text in asynchronous batch mode. @@ -84,6 +90,10 @@ service TranslationService { post: "/v3beta1/{parent=projects/*/locations/*}:batchTranslateText" body: "*" }; + option (google.longrunning.operation_info) = { + response_type: "BatchTranslateResponse" + metadata_type: "BatchTranslateMetadata" + }; } // Creates a glossary and returns the long-running operation. Returns @@ -93,6 +103,11 @@ service TranslationService { post: "/v3beta1/{parent=projects/*/locations/*}/glossaries" body: "glossary" }; + option (google.api.method_signature) = "parent,glossary"; + option (google.longrunning.operation_info) = { + response_type: "Glossary" + metadata_type: "CreateGlossaryMetadata" + }; } // Lists glossaries in a project. Returns NOT_FOUND, if the project doesn't @@ -101,6 +116,8 @@ service TranslationService { option (google.api.http) = { get: "/v3beta1/{parent=projects/*/locations/*}/glossaries" }; + option (google.api.method_signature) = "parent"; + option (google.api.method_signature) = "parent,filter"; } // Gets a glossary. 
Returns NOT_FOUND, if the glossary doesn't @@ -109,6 +126,7 @@ service TranslationService { option (google.api.http) = { get: "/v3beta1/{name=projects/*/locations/*/glossaries/*}" }; + option (google.api.method_signature) = "name"; } // Deletes a glossary, or cancels glossary construction @@ -118,6 +136,11 @@ service TranslationService { option (google.api.http) = { delete: "/v3beta1/{name=projects/*/locations/*/glossaries/*}" }; + option (google.api.method_signature) = "name"; + option (google.longrunning.operation_info) = { + response_type: "DeleteGlossaryResponse" + metadata_type: "DeleteGlossaryMetadata" + }; } } @@ -126,11 +149,11 @@ service TranslationService { message TranslateTextGlossaryConfig { // Required. Specifies the glossary used for this translation. Use // this format: projects/*/locations/*/glossaries/* - string glossary = 1; + string glossary = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. Indicates match is case-insensitive. // Default value is false if missing. - bool ignore_case = 2; + bool ignore_case = 2 [(google.api.field_behavior) = OPTIONAL]; } // The request message for synchronous translation. @@ -138,32 +161,43 @@ message TranslateTextRequest { // Required. The content of the input in string format. // We recommend the total content be less than 30k codepoints. // Use BatchTranslateText for larger text. - repeated string contents = 1; + repeated string contents = 1 [(google.api.field_behavior) = REQUIRED]; // Optional. The format of the source text, for example, "text/html", // "text/plain". If left blank, the MIME type defaults to "text/html". - string mime_type = 3; + string mime_type = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. The BCP-47 language code of the input text if // known, for example, "en-US" or "sr-Latn". Supported language codes are // listed in Language Support. 
If the source language isn't specified, the API // attempts to identify the source language automatically and returns the // source language within the response. - string source_language_code = 4; + string source_language_code = 4 [(google.api.field_behavior) = OPTIONAL]; // Required. The BCP-47 language code to use for translation of the input // text, set to one of the language codes listed in Language Support. - string target_language_code = 5; + string target_language_code = 5 [(google.api.field_behavior) = REQUIRED]; - // Required. Location to make a regional or global call. + // Required. Project or location to make a call. Must refer to a caller's + // project. // - // Format: `projects/{project-id}/locations/{location-id}`. + // Format: `projects/{project-id}` or + // `projects/{project-id}/locations/{location-id}`. // - // For global calls, use `projects/{project-id}/locations/global`. + // For global calls, use `projects/{project-id}/locations/global` or + // `projects/{project-id}`. + // + // Non-global location is required for requests using AutoML models or + // custom glossaries. // // Models and glossaries must be within the same region (have same // location-id), otherwise an INVALID_ARGUMENT (400) error is returned. - string parent = 8; + string parent = 8 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Optional. The `model` type requested for this translation. // @@ -182,12 +216,22 @@ message TranslateTextRequest { // `projects/{project-id}/locations/global/models/general/nmt`. // // If missing, the system decides which google base model to use. - string model = 6; + string model = 6 [(google.api.field_behavior) = OPTIONAL]; // Optional. Glossary to be applied. The glossary must be // within the same region (have the same location-id) as the model, otherwise // an INVALID_ARGUMENT (400) error is returned. 
- TranslateTextGlossaryConfig glossary_config = 7; + TranslateTextGlossaryConfig glossary_config = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. + // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 10 [(google.api.field_behavior) = OPTIONAL]; } message TranslateTextResponse { @@ -225,15 +269,23 @@ message Translation { // The request message for language detection. message DetectLanguageRequest { - // Required. Location to make a regional or global call. + // Required. Project or location to make a call. Must refer to a caller's + // project. // - // Format: `projects/{project-id}/locations/{location-id}`. + // Format: `projects/{project-id}/locations/{location-id}` or + // `projects/{project-id}`. // - // For global calls, use `projects/{project-id}/locations/global`. + // For global calls, use `projects/{project-id}/locations/global` or + // `projects/{project-id}`. // // Only models within the same region (has same location-id) can be used. // Otherwise an INVALID_ARGUMENT (400) error is returned. - string parent = 5; + string parent = 5 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Optional. The language detection model to be used. // @@ -244,7 +296,7 @@ message DetectLanguageRequest { // `projects/{project-id}/locations/{location-id}/models/language-detection/default`. // // If not specified, the default model is used. - string model = 4; + string model = 4 [(google.api.field_behavior) = OPTIONAL]; // Required. The source of the document from which to detect the language. 
oneof source { @@ -254,7 +306,17 @@ message DetectLanguageRequest { // Optional. The format of the source text, for example, "text/html", // "text/plain". If left blank, the MIME type defaults to "text/html". - string mime_type = 3; + string mime_type = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. + // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 6; } // The response message for language detection. @@ -276,20 +338,30 @@ message DetectLanguageResponse { // The request message for discovering supported languages. message GetSupportedLanguagesRequest { - // Required. Location to make a regional or global call. + // Required. Project or location to make a call. Must refer to a caller's + // project. // - // Format: `projects/{project-id}/locations/{location-id}`. + // Format: `projects/{project-id}` or + // `projects/{project-id}/locations/{location-id}`. // - // For global calls, use `projects/{project-id}/locations/global`. + // For global calls, use `projects/{project-id}/locations/global` or + // `projects/{project-id}`. + // + // Non-global location is required for AutoML models. // // Only models within the same region (have same location-id) can be used, // otherwise an INVALID_ARGUMENT (400) error is returned. - string parent = 3; + string parent = 3 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Optional. The language to use to return localized, human readable names // of supported languages. If missing, then display names are not returned // in a response. 
- string display_language_code = 1; + string display_language_code = 1 [(google.api.field_behavior) = OPTIONAL]; // Optional. Get supported languages of this model. // @@ -305,7 +377,7 @@ message GetSupportedLanguagesRequest { // // Returns languages supported by the specified model. // If missing, we get supported languages of Google general base (PBMT) model. - string model = 2; + string model = 2 [(google.api.field_behavior) = OPTIONAL]; } // The response message for discovering supported languages. @@ -338,7 +410,7 @@ message SupportedLanguage { // The Google Cloud Storage location for the input content. message GcsSource { // Required. Source data URI. For example, `gs://my_bucket/my_object`. - string input_uri = 1; + string input_uri = 1 [(google.api.field_behavior) = REQUIRED]; } // Input configuration for BatchTranslateText request. @@ -347,7 +419,7 @@ message InputConfig { // For `.tsv`, "text/html" is used if mime_type is missing. // For `.html`, this field must be "text/html" or empty. // For `.txt`, this field must be "text/plain" or empty. - string mime_type = 1; + string mime_type = 1 [(google.api.field_behavior) = OPTIONAL]; // Required. Specify the input. oneof source { @@ -373,12 +445,12 @@ message InputConfig { } } -// The Google Cloud Storage location for the output content +// The Google Cloud Storage location for the output content. message GcsDestination { // Required. There must be no files under 'output_uri_prefix'. - // 'output_uri_prefix' must end with "/", otherwise an INVALID_ARGUMENT (400) - // error is returned.. - string output_uri_prefix = 1; + // 'output_uri_prefix' must end with "/" and start with "gs://", otherwise an + // INVALID_ARGUMENT (400) error is returned. + string output_uri_prefix = 1 [(google.api.field_behavior) = REQUIRED]; } // Output configuration for BatchTranslateText request. @@ -457,7 +529,7 @@ message OutputConfig { // The batch translation request. message BatchTranslateTextRequest { - // Required. 
Location to make a regional call. + // Required. Location to make a call. Must refer to a caller's project. // // Format: `projects/{project-id}/locations/{location-id}`. // @@ -466,13 +538,18 @@ message BatchTranslateTextRequest { // Only AutoML Translation models or glossaries within the same region (have // the same location-id) can be used, otherwise an INVALID_ARGUMENT (400) // error is returned. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Required. Source language code. - string source_language_code = 2; + string source_language_code = 2 [(google.api.field_behavior) = REQUIRED]; // Required. Specify up to 10 language codes here. - repeated string target_language_codes = 3; + repeated string target_language_codes = 3 [(google.api.field_behavior) = REQUIRED]; // Optional. The models to use for translation. Map's key is target language // code. Map's value is model name. Value can be a built-in general model, @@ -490,22 +567,32 @@ message BatchTranslateTextRequest { // // If the map is empty or a specific model is // not requested for a language pair, then default google model (nmt) is used. - map models = 4; + map models = 4 [(google.api.field_behavior) = OPTIONAL]; // Required. Input configurations. // The total number of files matched should be <= 1000. // The total content size should be <= 100M Unicode codepoints. // The files must use UTF-8 encoding. - repeated InputConfig input_configs = 5; + repeated InputConfig input_configs = 5 [(google.api.field_behavior) = REQUIRED]; // Required. Output configuration. // If 2 input configs match to the same file (that is, same input path), // we don't generate output for duplicate inputs. - OutputConfig output_config = 6; + OutputConfig output_config = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. Glossaries to be applied for translation. 
// It's keyed by target language code. - map glossaries = 7; + map glossaries = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels with user-defined metadata for the request. + // + // Label keys and values can be no longer than 63 characters + // (Unicode codepoints), can only contain lowercase letters, numeric + // characters, underscores and dashes. International characters are allowed. + // Label values are optional. Label keys must start with a letter. + // + // See https://cloud.google.com/translate/docs/labels for more information. + map labels = 9 [(google.api.field_behavior) = OPTIONAL]; } // State metadata for the batch translation operation. @@ -606,6 +693,11 @@ message GlossaryInputConfig { // Represents a glossary built from user provided data. message Glossary { + option (google.api.resource) = { + type: "translate.googleapis.com/Glossary" + pattern: "projects/{project}/locations/{location}/glossaries/{glossary}" + }; + // Used with unidirectional glossaries. message LanguageCodePair { // Required. The BCP-47 language code of the input text, for example, @@ -627,7 +719,7 @@ message Glossary { // Required. The resource name of the glossary. Glossary names have the form // `projects/{project-id}/locations/{location-id}/glossaries/{glossary-id}`. - string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // Languages supported by the glossary. oneof languages { @@ -643,55 +735,75 @@ message Glossary { GlossaryInputConfig input_config = 5; // Output only. The number of entries defined in the glossary. - int32 entry_count = 6; + int32 entry_count = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. When CreateGlossary was called. - google.protobuf.Timestamp submit_time = 7; + google.protobuf.Timestamp submit_time = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. When the glossary creation was finished. 
- google.protobuf.Timestamp end_time = 8; + google.protobuf.Timestamp end_time = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Request message for CreateGlossary. message CreateGlossaryRequest { // Required. The project name. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Required. The glossary to create. - Glossary glossary = 2; + Glossary glossary = 2 [(google.api.field_behavior) = REQUIRED]; } // Request message for GetGlossary. message GetGlossaryRequest { // Required. The name of the glossary to retrieve. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "translate.googleapis.com/Glossary" + } + ]; } // Request message for DeleteGlossary. message DeleteGlossaryRequest { // Required. The name of the glossary to delete. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "translate.googleapis.com/Glossary" + } + ]; } // Request message for ListGlossaries. message ListGlossariesRequest { // Required. The name of the project from which to list all of the glossaries. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "locations.googleapis.com/Location" + } + ]; // Optional. Requested page size. The server may return fewer glossaries than // requested. If unspecified, the server picks an appropriate default. - int32 page_size = 2; + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; // Optional. A token identifying a page of results the server should return. // Typically, this is the value of [ListGlossariesResponse.next_page_token] // returned from the previous call to `ListGlossaries` method. // The first page is returned if `page_token`is empty or missing. 
- string page_token = 3; + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; // Optional. Filter specifying constraints of a list operation. // Filtering is not supported yet, and the parameter currently has no effect. // If missing, no filtering is performed. - string filter = 4; + string filter = 4 [(google.api.field_behavior) = OPTIONAL]; } // Response message for ListGlossaries. diff --git a/translate/google/cloud/translate_v3beta1/proto/translation_service_pb2.py b/translate/google/cloud/translate_v3beta1/proto/translation_service_pb2.py index 60dfc368b645..8a423d69322f 100644 --- a/translate/google/cloud/translate_v3beta1/proto/translation_service_pb2.py +++ b/translate/google/cloud/translate_v3beta1/proto/translation_service_pb2.py @@ -16,12 +16,13 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -32,14 +33,15 @@ '\n"com.google.cloud.translate.v3beta1B\027TranslationServiceProtoP\001ZGgoogle.golang.org/genproto/googleapis/cloud/translate/v3beta1;translate\370\001\001\252\002\036Google.Cloud.Translate.V3Beta1\312\002\036Google\\Cloud\\Translate\\V3beta1\352\002!Google::Cloud::Translate::V3beta1' ), serialized_pb=_b( - '\n@google/cloud/translation_v3beta1/proto/translation_service.proto\x12 
google.cloud.translation.v3beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/api/client.proto"D\n\x1bTranslateTextGlossaryConfig\x12\x10\n\x08glossary\x18\x01 \x01(\t\x12\x13\n\x0bignore_case\x18\x02 \x01(\x08"\xee\x01\n\x14TranslateTextRequest\x12\x10\n\x08\x63ontents\x18\x01 \x03(\t\x12\x11\n\tmime_type\x18\x03 \x01(\t\x12\x1c\n\x14source_language_code\x18\x04 \x01(\t\x12\x1c\n\x14target_language_code\x18\x05 \x01(\t\x12\x0e\n\x06parent\x18\x08 \x01(\t\x12\r\n\x05model\x18\x06 \x01(\t\x12V\n\x0fglossary_config\x18\x07 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig"\xaa\x01\n\x15TranslateTextResponse\x12\x43\n\x0ctranslations\x18\x01 \x03(\x0b\x32-.google.cloud.translation.v3beta1.Translation\x12L\n\x15glossary_translations\x18\x03 \x03(\x0b\x32-.google.cloud.translation.v3beta1.Translation"\xad\x01\n\x0bTranslation\x12\x17\n\x0ftranslated_text\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t\x12\x1e\n\x16\x64\x65tected_language_code\x18\x04 \x01(\t\x12V\n\x0fglossary_config\x18\x03 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig"f\n\x15\x44\x65tectLanguageRequest\x12\x0e\n\x06parent\x18\x05 \x01(\t\x12\r\n\x05model\x18\x04 \x01(\t\x12\x11\n\x07\x63ontent\x18\x01 \x01(\tH\x00\x12\x11\n\tmime_type\x18\x03 \x01(\tB\x08\n\x06source"=\n\x10\x44\x65tectedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"_\n\x16\x44\x65tectLanguageResponse\x12\x45\n\tlanguages\x18\x01 \x03(\x0b\x32\x32.google.cloud.translation.v3beta1.DetectedLanguage"\\\n\x1cGetSupportedLanguagesRequest\x12\x0e\n\x06parent\x18\x03 \x01(\t\x12\x1d\n\x15\x64isplay_language_code\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t"\\\n\x12SupportedLanguages\x12\x46\n\tlanguages\x18\x01 
\x03(\x0b\x32\x33.google.cloud.translation.v3beta1.SupportedLanguage"p\n\x11SupportedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x16\n\x0esupport_source\x18\x03 \x01(\x08\x12\x16\n\x0esupport_target\x18\x04 \x01(\x08"\x1e\n\tGcsSource\x12\x11\n\tinput_uri\x18\x01 \x01(\t"m\n\x0bInputConfig\x12\x11\n\tmime_type\x18\x01 \x01(\t\x12\x41\n\ngcs_source\x18\x02 \x01(\x0b\x32+.google.cloud.translation.v3beta1.GcsSourceH\x00\x42\x08\n\x06source"+\n\x0eGcsDestination\x12\x19\n\x11output_uri_prefix\x18\x01 \x01(\t"j\n\x0cOutputConfig\x12K\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32\x30.google.cloud.translation.v3beta1.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"\xd0\x04\n\x19\x42\x61tchTranslateTextRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x1c\n\x14source_language_code\x18\x02 \x01(\t\x12\x1d\n\x15target_language_codes\x18\x03 \x03(\t\x12W\n\x06models\x18\x04 \x03(\x0b\x32G.google.cloud.translation.v3beta1.BatchTranslateTextRequest.ModelsEntry\x12\x44\n\rinput_configs\x18\x05 \x03(\x0b\x32-.google.cloud.translation.v3beta1.InputConfig\x12\x45\n\routput_config\x18\x06 \x01(\x0b\x32..google.cloud.translation.v3beta1.OutputConfig\x12_\n\nglossaries\x18\x07 \x03(\x0b\x32K.google.cloud.translation.v3beta1.BatchTranslateTextRequest.GlossariesEntry\x1a-\n\x0bModelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1ap\n\x0fGlossariesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12L\n\x05value\x18\x02 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig:\x02\x38\x01"\xd3\x02\n\x16\x42\x61tchTranslateMetadata\x12M\n\x05state\x18\x01 \x01(\x0e\x32>.google.cloud.translation.v3beta1.BatchTranslateMetadata.State\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12\x18\n\x10total_characters\x18\x04 \x01(\x03\x12/\n\x0bsubmit_time\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\xcb\x01\n\x16\x42\x61tchTranslateResponse\x12\x18\n\x10total_characters\x18\x01 \x01(\x03\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12/\n\x0bsubmit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"b\n\x13GlossaryInputConfig\x12\x41\n\ngcs_source\x18\x01 \x01(\x0b\x32+.google.cloud.translation.v3beta1.GcsSourceH\x00\x42\x08\n\x06source"\x93\x04\n\x08Glossary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12T\n\rlanguage_pair\x18\x03 \x01(\x0b\x32;.google.cloud.translation.v3beta1.Glossary.LanguageCodePairH\x00\x12Y\n\x12language_codes_set\x18\x04 \x01(\x0b\x32;.google.cloud.translation.v3beta1.Glossary.LanguageCodesSetH\x00\x12K\n\x0cinput_config\x18\x05 \x01(\x0b\x32\x35.google.cloud.translation.v3beta1.GlossaryInputConfig\x12\x13\n\x0b\x65ntry_count\x18\x06 \x01(\x05\x12/\n\x0bsubmit_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1aN\n\x10LanguageCodePair\x12\x1c\n\x14source_language_code\x18\x01 \x01(\t\x12\x1c\n\x14target_language_code\x18\x02 \x01(\t\x1a*\n\x10LanguageCodesSet\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\tB\x0b\n\tlanguages"e\n\x15\x43reateGlossaryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\x08glossary\x18\x02 \x01(\x0b\x32*.google.cloud.translation.v3beta1.Glossary""\n\x12GetGlossaryRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"%\n\x15\x44\x65leteGlossaryRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"^\n\x15ListGlossariesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x04 
\x01(\t"q\n\x16ListGlossariesResponse\x12>\n\nglossaries\x18\x01 \x03(\x0b\x32*.google.cloud.translation.v3beta1.Glossary\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x8d\x02\n\x16\x43reateGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12M\n\x05state\x18\x02 \x01(\x0e\x32>.google.cloud.translation.v3beta1.CreateGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x8d\x02\n\x16\x44\x65leteGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12M\n\x05state\x18\x02 \x01(\x0e\x32>.google.cloud.translation.v3beta1.DeleteGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x85\x01\n\x16\x44\x65leteGlossaryResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp2\xb5\r\n\x12TranslationService\x12\xf4\x01\n\rTranslateText\x12\x36.google.cloud.translation.v3beta1.TranslateTextRequest\x1a\x37.google.cloud.translation.v3beta1.TranslateTextResponse"r\x82\xd3\xe4\x93\x02l"6/v3beta1/{parent=projects/*/locations/*}:translateText:\x01*Z/"*/v3beta1/{parent=projects/*}:translateText:\x01*\x12\xf9\x01\n\x0e\x44\x65tectLanguage\x12\x37.google.cloud.translation.v3beta1.DetectLanguageRequest\x1a\x38.google.cloud.translation.v3beta1.DetectLanguageResponse"t\x82\xd3\xe4\x93\x02n"7/v3beta1/{parent=projects/*/locations/*}:detectLanguage:\x01*Z0"+/v3beta1/{parent=projects/*}:detectLanguage:\x01*\x12\x85\x02\n\x15GetSupportedLanguages\x12>.google.cloud.translation.v3beta1.GetSupportedLanguagesRequest\x1a\x34.google.cloud.translation.v3beta1.SupportedLanguages"v\x82\xd3\xe4\x93\x02p\x12;/v3beta1/{parent=projects/*/locations/*}/supportedLanguagesZ1\x12//v3beta1/{parent=projects/*}/supportedLanguages\x12\xb8\x01\n\x12\x42\x61tchTranslateText\x12;.google.cloud.translation.v3beta1.BatchTranslateTextRequest\x1a\x1d.google.longrunning.Operation"F\x82\xd3\xe4\x93\x02@";/v3beta1/{parent=projects/*/locations/*}:batchTranslateText:\x01*\x12\xaf\x01\n\x0e\x43reateGlossary\x12\x37.google.cloud.translation.v3beta1.CreateGlossaryRequest\x1a\x1d.google.longrunning.Operation"E\x82\xd3\xe4\x93\x02?"3/v3beta1/{parent=projects/*/locations/*}/glossaries:\x08glossary\x12\xc0\x01\n\x0eListGlossaries\x12\x37.google.cloud.translation.v3beta1.ListGlossariesRequest\x1a\x38.google.cloud.translation.v3beta1.ListGlossariesResponse";\x82\xd3\xe4\x93\x02\x35\x12\x33/v3beta1/{parent=projects/*/locations/*}/glossaries\x12\xac\x01\n\x0bGetGlossary\x12\x34.google.cloud.translation.v3beta1.GetGlossaryRequest\x1a*.google.cloud.translation.v3beta1.Glossary";\x82\xd3\xe4\x93\x02\x35\x12\x33/v3beta1/{name=projects/*/locations/*/glossaries/*}\x12\xa5\x01\n\x0e\x44\x65leteGlossary\x12\x37.google.cloud.translation.v3beta1.DeleteGlossaryRequest\
x1a\x1d.google.longrunning.Operation";\x82\xd3\xe4\x93\x02\x35*3/v3beta1/{name=projects/*/locations/*/glossaries/*}\x1a\x1d\xca\x41\x1atranslation.googleapis.comB\xf1\x01\n"com.google.cloud.translate.v3beta1B\x17TranslationServiceProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/translate/v3beta1;translate\xf8\x01\x01\xaa\x02\x1eGoogle.Cloud.Translate.V3Beta1\xca\x02\x1eGoogle\\Cloud\\Translate\\V3beta1\xea\x02!Google::Cloud::Translate::V3beta1b\x06proto3' + '\n@google/cloud/translation_v3beta1/proto/translation_service.proto\x12 google.cloud.translation.v3beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"N\n\x1bTranslateTextGlossaryConfig\x12\x15\n\x08glossary\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x0bignore_case\x18\x02 \x01(\x08\x42\x03\xe0\x41\x01"\xbf\x03\n\x14TranslateTextRequest\x12\x15\n\x08\x63ontents\x18\x01 \x03(\tB\x03\xe0\x41\x02\x12\x16\n\tmime_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12!\n\x14source_language_code\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12!\n\x14target_language_code\x18\x05 \x01(\tB\x03\xe0\x41\x02\x12\x39\n\x06parent\x18\x08 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x12\n\x05model\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12[\n\x0fglossary_config\x18\x07 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfigB\x03\xe0\x41\x01\x12W\n\x06labels\x18\n \x03(\x0b\x32\x42.google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xaa\x01\n\x15TranslateTextResponse\x12\x43\n\x0ctranslations\x18\x01 \x03(\x0b\x32-.google.cloud.translation.v3beta1.Translation\x12L\n\x15glossary_translations\x18\x03 
\x03(\x0b\x32-.google.cloud.translation.v3beta1.Translation"\xad\x01\n\x0bTranslation\x12\x17\n\x0ftranslated_text\x18\x01 \x01(\t\x12\r\n\x05model\x18\x02 \x01(\t\x12\x1e\n\x16\x64\x65tected_language_code\x18\x04 \x01(\t\x12V\n\x0fglossary_config\x18\x03 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig"\x9f\x02\n\x15\x44\x65tectLanguageRequest\x12\x39\n\x06parent\x18\x05 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x12\n\x05model\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x11\n\x07\x63ontent\x18\x01 \x01(\tH\x00\x12\x16\n\tmime_type\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12S\n\x06labels\x18\x06 \x03(\x0b\x32\x43.google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x08\n\x06source"=\n\x10\x44\x65tectedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02"_\n\x16\x44\x65tectLanguageResponse\x12\x45\n\tlanguages\x18\x01 \x03(\x0b\x32\x32.google.cloud.translation.v3beta1.DetectedLanguage"\x91\x01\n\x1cGetSupportedLanguagesRequest\x12\x39\n\x06parent\x18\x03 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12"\n\x15\x64isplay_language_code\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x12\n\x05model\x18\x02 \x01(\tB\x03\xe0\x41\x01"\\\n\x12SupportedLanguages\x12\x46\n\tlanguages\x18\x01 \x03(\x0b\x32\x33.google.cloud.translation.v3beta1.SupportedLanguage"p\n\x11SupportedLanguage\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x16\n\x0esupport_source\x18\x03 \x01(\x08\x12\x16\n\x0esupport_target\x18\x04 \x01(\x08"#\n\tGcsSource\x12\x16\n\tinput_uri\x18\x01 \x01(\tB\x03\xe0\x41\x02"r\n\x0bInputConfig\x12\x16\n\tmime_type\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12\x41\n\ngcs_source\x18\x02 \x01(\x0b\x32+.google.cloud.translation.v3beta1.GcsSourceH\x00\x42\x08\n\x06source"0\n\x0eGcsDestination\x12\x1e\n\x11output_uri_prefix\x18\x01 
\x01(\tB\x03\xe0\x41\x02"j\n\x0cOutputConfig\x12K\n\x0fgcs_destination\x18\x01 \x01(\x0b\x32\x30.google.cloud.translation.v3beta1.GcsDestinationH\x00\x42\r\n\x0b\x64\x65stination"\xa6\x06\n\x19\x42\x61tchTranslateTextRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12!\n\x14source_language_code\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12"\n\x15target_language_codes\x18\x03 \x03(\tB\x03\xe0\x41\x02\x12\\\n\x06models\x18\x04 \x03(\x0b\x32G.google.cloud.translation.v3beta1.BatchTranslateTextRequest.ModelsEntryB\x03\xe0\x41\x01\x12I\n\rinput_configs\x18\x05 \x03(\x0b\x32-.google.cloud.translation.v3beta1.InputConfigB\x03\xe0\x41\x02\x12J\n\routput_config\x18\x06 \x01(\x0b\x32..google.cloud.translation.v3beta1.OutputConfigB\x03\xe0\x41\x02\x12\x64\n\nglossaries\x18\x07 \x03(\x0b\x32K.google.cloud.translation.v3beta1.BatchTranslateTextRequest.GlossariesEntryB\x03\xe0\x41\x01\x12\\\n\x06labels\x18\t \x03(\x0b\x32G.google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntryB\x03\xe0\x41\x01\x1a-\n\x0bModelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1ap\n\x0fGlossariesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12L\n\x05value\x18\x02 \x01(\x0b\x32=.google.cloud.translation.v3beta1.TranslateTextGlossaryConfig:\x02\x38\x01\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xd3\x02\n\x16\x42\x61tchTranslateMetadata\x12M\n\x05state\x18\x01 \x01(\x0e\x32>.google.cloud.translation.v3beta1.BatchTranslateMetadata.State\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12\x18\n\x10total_characters\x18\x04 \x01(\x03\x12/\n\x0bsubmit_time\x18\x05 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\xcb\x01\n\x16\x42\x61tchTranslateResponse\x12\x18\n\x10total_characters\x18\x01 \x01(\x03\x12\x1d\n\x15translated_characters\x18\x02 \x01(\x03\x12\x19\n\x11\x66\x61iled_characters\x18\x03 \x01(\x03\x12/\n\x0bsubmit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"b\n\x13GlossaryInputConfig\x12\x41\n\ngcs_source\x18\x01 \x01(\x0b\x32+.google.cloud.translation.v3beta1.GcsSourceH\x00\x42\x08\n\x06source"\x8e\x05\n\x08Glossary\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12T\n\rlanguage_pair\x18\x03 \x01(\x0b\x32;.google.cloud.translation.v3beta1.Glossary.LanguageCodePairH\x00\x12Y\n\x12language_codes_set\x18\x04 \x01(\x0b\x32;.google.cloud.translation.v3beta1.Glossary.LanguageCodesSetH\x00\x12K\n\x0cinput_config\x18\x05 \x01(\x0b\x32\x35.google.cloud.translation.v3beta1.GlossaryInputConfig\x12\x18\n\x0b\x65ntry_count\x18\x06 \x01(\x05\x42\x03\xe0\x41\x03\x12\x34\n\x0bsubmit_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x31\n\x08\x65nd_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x1aN\n\x10LanguageCodePair\x12\x1c\n\x14source_language_code\x18\x01 \x01(\t\x12\x1c\n\x14target_language_code\x18\x02 \x01(\t\x1a*\n\x10LanguageCodesSet\x12\x16\n\x0elanguage_codes\x18\x01 \x03(\t:e\xea\x41\x62\n!translate.googleapis.com/Glossary\x12=projects/{project}/locations/{location}/glossaries/{glossary}B\x0b\n\tlanguages"\x95\x01\n\x15\x43reateGlossaryRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x41\n\x08glossary\x18\x02 
\x01(\x0b\x32*.google.cloud.translation.v3beta1.GlossaryB\x03\xe0\x41\x02"M\n\x12GetGlossaryRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!translate.googleapis.com/Glossary"P\n\x15\x44\x65leteGlossaryRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!translate.googleapis.com/Glossary"\x98\x01\n\x15ListGlossariesRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!locations.googleapis.com/Location\x12\x16\n\tpage_size\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x17\n\npage_token\x18\x03 \x01(\tB\x03\xe0\x41\x01\x12\x13\n\x06\x66ilter\x18\x04 \x01(\tB\x03\xe0\x41\x01"q\n\x16ListGlossariesResponse\x12>\n\nglossaries\x18\x01 \x03(\x0b\x32*.google.cloud.translation.v3beta1.Glossary\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"\x8d\x02\n\x16\x43reateGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12M\n\x05state\x18\x02 \x01(\x0e\x32>.google.cloud.translation.v3beta1.CreateGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x8d\x02\n\x16\x44\x65leteGlossaryMetadata\x12\x0c\n\x04name\x18\x01 \x01(\t\x12M\n\x05state\x18\x02 \x01(\x0e\x32>.google.cloud.translation.v3beta1.DeleteGlossaryMetadata.State\x12/\n\x0bsubmit_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"e\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\r\n\tSUCCEEDED\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0e\n\nCANCELLING\x10\x04\x12\r\n\tCANCELLED\x10\x05"\x85\x01\n\x16\x44\x65leteGlossaryResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0bsubmit_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp2\x9a\x10\n\x12TranslationService\x12\xf4\x01\n\rTranslateText\x12\x36.google.cloud.translation.v3beta1.TranslateTextRequest\x1a\x37.google.cloud.translation.v3beta1.TranslateTextResponse"r\x82\xd3\xe4\x93\x02l"6/v3beta1/{parent=projects/*/locations/*}:translateText:\x01*Z/"*/v3beta1/{parent=projects/*}:translateText:\x01*\x12\x93\x02\n\x0e\x44\x65tectLanguage\x12\x37.google.cloud.translation.v3beta1.DetectLanguageRequest\x1a\x38.google.cloud.translation.v3beta1.DetectLanguageResponse"\x8d\x01\x82\xd3\xe4\x93\x02n"7/v3beta1/{parent=projects/*/locations/*}:detectLanguage:\x01*Z0"+/v3beta1/{parent=projects/*}:detectLanguage:\x01*\xda\x41\x16parent,model,mime_type\x12\xab\x02\n\x15GetSupportedLanguages\x12>.google.cloud.translation.v3beta1.GetSupportedLanguagesRequest\x1a\x34.google.cloud.translation.v3beta1.SupportedLanguages"\x9b\x01\x82\xd3\xe4\x93\x02p\x12;/v3beta1/{parent=projects/*/locations/*}/supportedLanguagesZ1\x12//v3beta1/{parent=projects/*}/supportedLanguages\xda\x41"parent,display_language_code,model\x12\xeb\x01\n\x12\x42\x61tchTranslateText\x12;.google.cloud.translation.v3beta1.BatchTranslateTextRequest\x1a\x1d.google.longrunning.Operation"y\x82\xd3\xe4\x93\x02@";/v3beta1/{parent=projects/*/locations/*}:batchTranslateText:\x01*\xca\x41\x30\n\x16\x42\x61tchTranslateResponse\x12\x16\x42\x61tchTranslateMetadata\x12\xe6\x01\n\x0e\x43reateGlossary\x12\x37.google.cloud.translation.v3beta1.CreateGlossaryRequest\x1a\x1d.google.longrunning.Operation"|\x82\xd3\xe4\x93\x02?"3/v3beta1/{parent=projects/*/locations/*}/glossaries:\x08glossary\xda\x41\x0fparent,glossary\xca\x41"\n\x08Glossary\x12\x16\x43reateGlossaryMetadata\x12\xd9\x01\n\x0eListGlossaries\x12\x37.google.cloud.translation.v3beta1.ListGlossariesRequest\x1a\x38.google.cloud.translation.v3beta1.ListGlossariesResponse"T\x82\xd3\xe4\x93\x02\x35\x12\x33/v3beta1/{parent=projects/*/locations/*}/glossaries\xda\x41\x06parent\xda\x41\rparent,filter\x12\xb3\x01\n\x0bGetGlo
ssary\x12\x34.google.cloud.translation.v3beta1.GetGlossaryRequest\x1a*.google.cloud.translation.v3beta1.Glossary"B\x82\xd3\xe4\x93\x02\x35\x12\x33/v3beta1/{name=projects/*/locations/*/glossaries/*}\xda\x41\x04name\x12\xdf\x01\n\x0e\x44\x65leteGlossary\x12\x37.google.cloud.translation.v3beta1.DeleteGlossaryRequest\x1a\x1d.google.longrunning.Operation"u\x82\xd3\xe4\x93\x02\x35*3/v3beta1/{name=projects/*/locations/*/glossaries/*}\xda\x41\x04name\xca\x41\x30\n\x16\x44\x65leteGlossaryResponse\x12\x16\x44\x65leteGlossaryMetadata\x1a~\xca\x41\x18translate.googleapis.com\xd2\x41`https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-translationB\xf1\x01\n"com.google.cloud.translate.v3beta1B\x17TranslationServiceProtoP\x01ZGgoogle.golang.org/genproto/googleapis/cloud/translate/v3beta1;translate\xf8\x01\x01\xaa\x02\x1eGoogle.Cloud.Translate.V3Beta1\xca\x02\x1eGoogle\\Cloud\\Translate\\V3beta1\xea\x02!Google::Cloud::Translate::V3beta1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, ], ) @@ -75,8 +77,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2610, - serialized_end=2711, + serialized_start=3331, + serialized_end=3432, ) _sym_db.RegisterEnumDescriptor(_BATCHTRANSLATEMETADATA_STATE) @@ -111,8 +113,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2610, - serialized_end=2711, + serialized_start=3331, + serialized_end=3432, ) _sym_db.RegisterEnumDescriptor(_CREATEGLOSSARYMETADATA_STATE) @@ -147,8 +149,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2610, - serialized_end=2711, + serialized_start=3331, + serialized_end=3432, ) 
_sym_db.RegisterEnumDescriptor(_DELETEGLOSSARYMETADATA_STATE) @@ -175,7 +177,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -193,7 +195,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -205,11 +207,67 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=254, - serialized_end=322, + serialized_start=287, + serialized_end=365, ) +_TRANSLATETEXTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=770, + serialized_end=815, +) + _TRANSLATETEXTREQUEST = _descriptor.Descriptor( name="TranslateTextRequest", 
full_name="google.cloud.translation.v3beta1.TranslateTextRequest", @@ -232,7 +290,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -250,7 +308,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -268,7 +326,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -286,7 +344,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -304,7 +362,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -322,7 +382,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -340,20 +400,38 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3beta1.TranslateTextRequest.labels", + index=7, + number=10, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], extensions=[], - nested_types=[], + nested_types=[_TRANSLATETEXTREQUEST_LABELSENTRY], enum_types=[], 
serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=325, - serialized_end=563, + serialized_start=368, + serialized_end=815, ) @@ -409,8 +487,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=566, - serialized_end=736, + serialized_start=818, + serialized_end=988, ) @@ -502,11 +580,67 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=739, - serialized_end=912, + serialized_start=991, + serialized_end=1164, ) +_DETECTLANGUAGEREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=770, + serialized_end=815, +) + _DETECTLANGUAGEREQUEST = _descriptor.Descriptor( name="DetectLanguageRequest", full_name="google.cloud.translation.v3beta1.DetectLanguageRequest", @@ -529,7 +663,9 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -547,7 +683,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -583,12 +719,30 @@ containing_type=None, is_extension=False, extension_scope=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3beta1.DetectLanguageRequest.labels", + index=4, + number=6, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], - nested_types=[], + nested_types=[_DETECTLANGUAGEREQUEST_LABELSENTRY], enum_types=[], serialized_options=None, is_extendable=False, @@ -603,8 +757,8 @@ fields=[], ) ], - serialized_start=914, - serialized_end=1016, + serialized_start=1167, + serialized_end=1454, ) @@ -660,8 +814,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1018, - serialized_end=1079, + serialized_start=1456, + serialized_end=1517, ) @@ -699,8 +853,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1081, - serialized_end=1176, + serialized_start=1519, + serialized_end=1614, ) @@ -726,7 +880,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -744,7 +900,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -762,7 +918,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -774,8 +930,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1178, - serialized_end=1270, + serialized_start=1617, + serialized_end=1762, ) @@ -813,8 +969,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1272, - serialized_end=1364, + serialized_start=1764, + serialized_end=1856, ) @@ -906,8 +1062,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1366, - serialized_end=1478, + serialized_start=1858, + serialized_end=1970, ) @@ -933,7 +1089,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -945,8 +1101,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1480, - serialized_end=1510, + serialized_start=1972, + serialized_end=2007, ) @@ -972,7 +1128,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1010,8 +1166,8 @@ fields=[], ) ], - serialized_start=1512, - serialized_end=1621, + serialized_start=2009, + serialized_end=2123, ) @@ -1037,7 +1193,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ) ], @@ -1049,8 +1205,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1623, - serialized_end=1666, + serialized_start=2125, + serialized_end=2173, ) @@ -1096,8 +1252,8 @@ fields=[], ) ], - serialized_start=1668, - serialized_end=1774, + serialized_start=2175, + serialized_end=2281, ) @@ -1153,8 +1309,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2210, - serialized_end=2255, + serialized_start=2884, 
+ serialized_end=2929, ) _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY = _descriptor.Descriptor( @@ -1209,8 +1365,64 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2257, - serialized_end=2369, + serialized_start=2931, + serialized_end=3043, +) + +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name="LabelsEntry", + full_name="google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=_b("8\001"), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=770, + serialized_end=815, ) _BATCHTRANSLATETEXTREQUEST = _descriptor.Descriptor( @@ -1235,7 +1447,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1253,7 +1467,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1271,7 +1485,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1289,7 +1503,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1307,7 +1521,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1325,7 +1539,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1343,7 +1557,25 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.cloud.translation.v3beta1.BatchTranslateTextRequest.labels", + index=7, + number=9, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -1351,6 +1583,7 @@ nested_types=[ _BATCHTRANSLATETEXTREQUEST_MODELSENTRY, _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY, + _BATCHTRANSLATETEXTREQUEST_LABELSENTRY, ], enum_types=[], serialized_options=None, @@ -1358,8 +1591,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1777, - serialized_end=2369, + serialized_start=2284, + serialized_end=3090, ) @@ -1469,8 +1702,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2372, - serialized_end=2711, + serialized_start=3093, + 
serialized_end=3432, ) @@ -1580,8 +1813,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2714, - serialized_end=2917, + serialized_start=3435, + serialized_end=3638, ) @@ -1627,8 +1860,8 @@ fields=[], ) ], - serialized_start=2919, - serialized_end=3017, + serialized_start=3640, + serialized_end=3738, ) @@ -1684,8 +1917,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3416, - serialized_end=3494, + serialized_start=4157, + serialized_end=4235, ) _GLOSSARY_LANGUAGECODESSET = _descriptor.Descriptor( @@ -1722,8 +1955,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3496, - serialized_end=3538, + serialized_start=4237, + serialized_end=4279, ) _GLOSSARY = _descriptor.Descriptor( @@ -1748,7 +1981,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1820,7 +2053,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1838,7 +2071,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1856,14 +2089,16 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], extensions=[], nested_types=[_GLOSSARY_LANGUAGECODEPAIR, _GLOSSARY_LANGUAGECODESSET], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352Ab\n!translate.googleapis.com/Glossary\022=projects/{project}/locations/{location}/glossaries/{glossary}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1876,8 +2111,8 @@ fields=[], ) ], - serialized_start=3020, - serialized_end=3551, + serialized_start=3741, + serialized_end=4395, ) @@ 
-1903,7 +2138,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1921,7 +2158,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1933,8 +2170,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3553, - serialized_end=3654, + serialized_start=4398, + serialized_end=4547, ) @@ -1960,7 +2197,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!translate.googleapis.com/Glossary" + ), file=DESCRIPTOR, ) ], @@ -1972,8 +2211,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3656, - serialized_end=3690, + serialized_start=4549, + serialized_end=4626, ) @@ -1999,7 +2238,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!translate.googleapis.com/Glossary" + ), file=DESCRIPTOR, ) ], @@ -2011,8 +2252,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3692, - serialized_end=3729, + serialized_start=4628, + serialized_end=4708, ) @@ -2038,7 +2279,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A#\n!locations.googleapis.com/Location" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2056,7 +2299,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2074,7 +2317,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -2092,7 +2335,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2104,8 +2347,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3731, - serialized_end=3825, + serialized_start=4711, + serialized_end=4863, ) @@ -2161,8 +2404,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3827, - serialized_end=3940, + serialized_start=4865, + serialized_end=4978, ) @@ -2236,8 +2479,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3943, - serialized_end=4212, + serialized_start=4981, + serialized_end=5250, ) @@ -2311,8 +2554,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4215, - serialized_end=4484, + serialized_start=5253, + serialized_end=5522, ) @@ -2386,13 +2629,17 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4487, - serialized_end=4620, + serialized_start=5525, + serialized_end=5658, ) +_TRANSLATETEXTREQUEST_LABELSENTRY.containing_type = _TRANSLATETEXTREQUEST _TRANSLATETEXTREQUEST.fields_by_name[ "glossary_config" ].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_TRANSLATETEXTREQUEST.fields_by_name[ + "labels" +].message_type = _TRANSLATETEXTREQUEST_LABELSENTRY _TRANSLATETEXTRESPONSE.fields_by_name["translations"].message_type = _TRANSLATION _TRANSLATETEXTRESPONSE.fields_by_name[ "glossary_translations" @@ -2400,6 +2647,10 @@ _TRANSLATION.fields_by_name[ "glossary_config" ].message_type = _TRANSLATETEXTGLOSSARYCONFIG +_DETECTLANGUAGEREQUEST_LABELSENTRY.containing_type = _DETECTLANGUAGEREQUEST +_DETECTLANGUAGEREQUEST.fields_by_name[ + "labels" +].message_type = _DETECTLANGUAGEREQUEST_LABELSENTRY _DETECTLANGUAGEREQUEST.oneofs_by_name["source"].fields.append( _DETECTLANGUAGEREQUEST.fields_by_name["content"] ) @@ -2427,6 +2678,7 @@ "value" ].message_type = _TRANSLATETEXTGLOSSARYCONFIG 
_BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY.containing_type = _BATCHTRANSLATETEXTREQUEST _BATCHTRANSLATETEXTREQUEST.fields_by_name[ "models" ].message_type = _BATCHTRANSLATETEXTREQUEST_MODELSENTRY @@ -2435,6 +2687,9 @@ _BATCHTRANSLATETEXTREQUEST.fields_by_name[ "glossaries" ].message_type = _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY +_BATCHTRANSLATETEXTREQUEST.fields_by_name[ + "labels" +].message_type = _BATCHTRANSLATETEXTREQUEST_LABELSENTRY _BATCHTRANSLATEMETADATA.fields_by_name[ "state" ].enum_type = _BATCHTRANSLATEMETADATA_STATE @@ -2562,6 +2817,15 @@ "TranslateTextRequest", (_message.Message,), dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_TRANSLATETEXTREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.TranslateTextRequest.LabelsEntry) + ), + ), DESCRIPTOR=_TRANSLATETEXTREQUEST, __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2", __doc__="""The request message for synchronous translation. @@ -2588,9 +2852,12 @@ the input text, set to one of the language codes listed in Language Support. parent: - Required. Location to make a regional or global call. Format: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-id}`` or ``projects/{project-id}/locations/{location-id}``. For global - calls, use ``projects/{project-id}/locations/global``. Models + calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. Non-global location is required + for requests using AutoML models or custom glossaries. Models and glossaries must be within the same region (have same location-id), otherwise an INVALID\_ARGUMENT (400) error is returned. @@ -2610,11 +2877,21 @@ Optional. 
Glossary to be applied. The glossary must be within the same region (have the same location-id) as the model, otherwise an INVALID\_ARGUMENT (400) error is returned. + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. """, # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.TranslateTextRequest) ), ) _sym_db.RegisterMessage(TranslateTextRequest) +_sym_db.RegisterMessage(TranslateTextRequest.LabelsEntry) TranslateTextResponse = _reflection.GeneratedProtocolMessageType( "TranslateTextResponse", @@ -2672,6 +2949,15 @@ "DetectLanguageRequest", (_message.Message,), dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_DETECTLANGUAGEREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.DetectLanguageRequest.LabelsEntry) + ), + ), DESCRIPTOR=_DETECTLANGUAGEREQUEST, __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2", __doc__="""The request message for language detection. @@ -2679,9 +2965,11 @@ Attributes: parent: - Required. Location to make a regional or global call. Format: - ``projects/{project-id}/locations/{location-id}``. For global - calls, use ``projects/{project-id}/locations/global``. Only + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project- + id}/locations/{location-id}`` or ``projects/{project-id}``. + For global calls, use ``projects/{project- + id}/locations/global`` or ``projects/{project-id}``. 
Only models within the same region (has same location-id) can be used. Otherwise an INVALID\_ARGUMENT (400) error is returned. model: @@ -2701,11 +2989,21 @@ Optional. The format of the source text, for example, "text/html", "text/plain". If left blank, the MIME type defaults to "text/html". + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. """, # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.DetectLanguageRequest) ), ) _sym_db.RegisterMessage(DetectLanguageRequest) +_sym_db.RegisterMessage(DetectLanguageRequest.LabelsEntry) DetectedLanguage = _reflection.GeneratedProtocolMessageType( "DetectedLanguage", @@ -2758,11 +3056,14 @@ Attributes: parent: - Required. Location to make a regional or global call. Format: + Required. Project or location to make a call. Must refer to a + caller's project. Format: ``projects/{project-id}`` or ``projects/{project-id}/locations/{location-id}``. For global - calls, use ``projects/{project-id}/locations/global``. Only - models within the same region (have same location-id) can be - used, otherwise an INVALID\_ARGUMENT (400) error is returned. + calls, use ``projects/{project-id}/locations/global`` or + ``projects/{project-id}``. Non-global location is required + for AutoML models. Only models within the same region (have + same location-id) can be used, otherwise an INVALID\_ARGUMENT + (400) error is returned. display_language_code: Optional. The language to use to return localized, human readable names of supported languages. 
If missing, then @@ -2896,14 +3197,15 @@ dict( DESCRIPTOR=_GCSDESTINATION, __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2", - __doc__="""The Google Cloud Storage location for the output content + __doc__="""The Google Cloud Storage location for the output content. Attributes: output_uri_prefix: Required. There must be no files under 'output\_uri\_prefix'. - 'output\_uri\_prefix' must end with "/", otherwise an - INVALID\_ARGUMENT (400) error is returned.. + 'output\_uri\_prefix' must end with "/" and start with + "gs://", otherwise an INVALID\_ARGUMENT (400) error is + returned. """, # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.GcsDestination) ), @@ -3006,6 +3308,15 @@ # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.BatchTranslateTextRequest.GlossariesEntry) ), ), + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST_LABELSENTRY, + __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2" + # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.BatchTranslateTextRequest.LabelsEntry) + ), + ), DESCRIPTOR=_BATCHTRANSLATETEXTREQUEST, __module__="google.cloud.translation_v3beta1.proto.translation_service_pb2", __doc__="""The batch translation request. @@ -3013,12 +3324,12 @@ Attributes: parent: - Required. Location to make a regional call. Format: - ``projects/{project-id}/locations/{location-id}``. The - ``global`` location is not supported for batch translation. - Only AutoML Translation models or glossaries within the same - region (have the same location-id) can be used, otherwise an - INVALID\_ARGUMENT (400) error is returned. + Required. Location to make a call. Must refer to a caller's + project. Format: ``projects/{project-id}/locations/{location- + id}``. The ``global`` location is not supported for batch + translation. 
Only AutoML Translation models or glossaries + within the same region (have the same location-id) can be + used, otherwise an INVALID\_ARGUMENT (400) error is returned. source_language_code: Required. Source language code. target_language_codes: @@ -3046,6 +3357,15 @@ glossaries: Optional. Glossaries to be applied for translation. It's keyed by target language code. + labels: + Optional. The labels with user-defined metadata for the + request. Label keys and values can be no longer than 63 + characters (Unicode codepoints), can only contain lowercase + letters, numeric characters, underscores and dashes. + International characters are allowed. Label values are + optional. Label keys must start with a letter. See + https://cloud.google.com/translate/docs/labels for more + information. """, # @@protoc_insertion_point(class_scope:google.cloud.translation.v3beta1.BatchTranslateTextRequest) ), @@ -3053,6 +3373,7 @@ _sym_db.RegisterMessage(BatchTranslateTextRequest) _sym_db.RegisterMessage(BatchTranslateTextRequest.ModelsEntry) _sym_db.RegisterMessage(BatchTranslateTextRequest.GlossariesEntry) +_sym_db.RegisterMessage(BatchTranslateTextRequest.LabelsEntry) BatchTranslateMetadata = _reflection.GeneratedProtocolMessageType( "BatchTranslateMetadata", @@ -3419,17 +3740,62 @@ DESCRIPTOR._options = None +_TRANSLATETEXTGLOSSARYCONFIG.fields_by_name["glossary"]._options = None +_TRANSLATETEXTGLOSSARYCONFIG.fields_by_name["ignore_case"]._options = None +_TRANSLATETEXTREQUEST_LABELSENTRY._options = None +_TRANSLATETEXTREQUEST.fields_by_name["contents"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["mime_type"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["source_language_code"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["target_language_code"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["parent"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["model"]._options = None +_TRANSLATETEXTREQUEST.fields_by_name["glossary_config"]._options = 
None +_TRANSLATETEXTREQUEST.fields_by_name["labels"]._options = None +_DETECTLANGUAGEREQUEST_LABELSENTRY._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["parent"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["model"]._options = None +_DETECTLANGUAGEREQUEST.fields_by_name["mime_type"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["parent"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["display_language_code"]._options = None +_GETSUPPORTEDLANGUAGESREQUEST.fields_by_name["model"]._options = None +_GCSSOURCE.fields_by_name["input_uri"]._options = None +_INPUTCONFIG.fields_by_name["mime_type"]._options = None +_GCSDESTINATION.fields_by_name["output_uri_prefix"]._options = None _BATCHTRANSLATETEXTREQUEST_MODELSENTRY._options = None _BATCHTRANSLATETEXTREQUEST_GLOSSARIESENTRY._options = None +_BATCHTRANSLATETEXTREQUEST_LABELSENTRY._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["parent"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["source_language_code"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["target_language_codes"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["models"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["input_configs"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["output_config"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["glossaries"]._options = None +_BATCHTRANSLATETEXTREQUEST.fields_by_name["labels"]._options = None +_GLOSSARY.fields_by_name["name"]._options = None +_GLOSSARY.fields_by_name["entry_count"]._options = None +_GLOSSARY.fields_by_name["submit_time"]._options = None +_GLOSSARY.fields_by_name["end_time"]._options = None +_GLOSSARY._options = None +_CREATEGLOSSARYREQUEST.fields_by_name["parent"]._options = None +_CREATEGLOSSARYREQUEST.fields_by_name["glossary"]._options = None +_GETGLOSSARYREQUEST.fields_by_name["name"]._options = None +_DELETEGLOSSARYREQUEST.fields_by_name["name"]._options = None 
+_LISTGLOSSARIESREQUEST.fields_by_name["parent"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["page_size"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["page_token"]._options = None +_LISTGLOSSARIESREQUEST.fields_by_name["filter"]._options = None _TRANSLATIONSERVICE = _descriptor.ServiceDescriptor( name="TranslationService", full_name="google.cloud.translation.v3beta1.TranslationService", file=DESCRIPTOR, index=0, - serialized_options=_b("\312A\032translation.googleapis.com"), - serialized_start=4623, - serialized_end=6340, + serialized_options=_b( + "\312A\030translate.googleapis.com\322A`https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-translation" + ), + serialized_start=5661, + serialized_end=7735, methods=[ _descriptor.MethodDescriptor( name="TranslateText", @@ -3450,7 +3816,7 @@ input_type=_DETECTLANGUAGEREQUEST, output_type=_DETECTLANGUAGERESPONSE, serialized_options=_b( - '\202\323\344\223\002n"7/v3beta1/{parent=projects/*/locations/*}:detectLanguage:\001*Z0"+/v3beta1/{parent=projects/*}:detectLanguage:\001*' + '\202\323\344\223\002n"7/v3beta1/{parent=projects/*/locations/*}:detectLanguage:\001*Z0"+/v3beta1/{parent=projects/*}:detectLanguage:\001*\332A\026parent,model,mime_type' ), ), _descriptor.MethodDescriptor( @@ -3461,7 +3827,7 @@ input_type=_GETSUPPORTEDLANGUAGESREQUEST, output_type=_SUPPORTEDLANGUAGES, serialized_options=_b( - "\202\323\344\223\002p\022;/v3beta1/{parent=projects/*/locations/*}/supportedLanguagesZ1\022//v3beta1/{parent=projects/*}/supportedLanguages" + '\202\323\344\223\002p\022;/v3beta1/{parent=projects/*/locations/*}/supportedLanguagesZ1\022//v3beta1/{parent=projects/*}/supportedLanguages\332A"parent,display_language_code,model' ), ), _descriptor.MethodDescriptor( @@ -3472,7 +3838,7 @@ input_type=_BATCHTRANSLATETEXTREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - 
'\202\323\344\223\002@";/v3beta1/{parent=projects/*/locations/*}:batchTranslateText:\001*' + '\202\323\344\223\002@";/v3beta1/{parent=projects/*/locations/*}:batchTranslateText:\001*\312A0\n\026BatchTranslateResponse\022\026BatchTranslateMetadata' ), ), _descriptor.MethodDescriptor( @@ -3483,7 +3849,7 @@ input_type=_CREATEGLOSSARYREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002?"3/v3beta1/{parent=projects/*/locations/*}/glossaries:\010glossary' + '\202\323\344\223\002?"3/v3beta1/{parent=projects/*/locations/*}/glossaries:\010glossary\332A\017parent,glossary\312A"\n\010Glossary\022\026CreateGlossaryMetadata' ), ), _descriptor.MethodDescriptor( @@ -3494,7 +3860,7 @@ input_type=_LISTGLOSSARIESREQUEST, output_type=_LISTGLOSSARIESRESPONSE, serialized_options=_b( - "\202\323\344\223\0025\0223/v3beta1/{parent=projects/*/locations/*}/glossaries" + "\202\323\344\223\0025\0223/v3beta1/{parent=projects/*/locations/*}/glossaries\332A\006parent\332A\rparent,filter" ), ), _descriptor.MethodDescriptor( @@ -3505,7 +3871,7 @@ input_type=_GETGLOSSARYREQUEST, output_type=_GLOSSARY, serialized_options=_b( - "\202\323\344\223\0025\0223/v3beta1/{name=projects/*/locations/*/glossaries/*}" + "\202\323\344\223\0025\0223/v3beta1/{name=projects/*/locations/*/glossaries/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -3516,7 +3882,7 @@ input_type=_DELETEGLOSSARYREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - "\202\323\344\223\0025*3/v3beta1/{name=projects/*/locations/*/glossaries/*}" + "\202\323\344\223\0025*3/v3beta1/{name=projects/*/locations/*/glossaries/*}\332A\004name\312A0\n\026DeleteGlossaryResponse\022\026DeleteGlossaryMetadata" ), ), ], diff --git a/translate/setup.py b/translate/setup.py index 5bacdb534155..24d632a90d00 100644 --- a/translate/setup.py +++ b/translate/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-translate" description = "Google 
Cloud Translation API client library" -version = "1.6.0" +version = "2.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' diff --git a/translate/synth.metadata b/translate/synth.metadata index cd7d315b9cbf..6c4de912fede 100644 --- a/translate/synth.metadata +++ b/translate/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:45:19.878568Z", + "updateTime": "2019-10-18T22:49:41.466785Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.0", + "dockerImage": "googleapis/artman@sha256:fd2b49cce3d652929cc80157ec2d91bebe993f7cd4e89afaad80f9c785f8bf36" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "0e9a6d15fcb944ed40921ba0aad2082ee1bc7edd", + "internalRef": "275543900" } }, { @@ -34,6 +34,16 @@ "generator": "gapic", "config": "google/cloud/translate/artman_translate_v3beta1.yaml" } + }, + { + "client": { + "source": "googleapis", + "apiName": "translate", + "apiVersion": "v3", + "language": "python", + "generator": "gapic", + "config": "google/cloud/translate/artman_translate_v3.yaml" + } } ] } \ No newline at end of file diff --git a/translate/synth.py b/translate/synth.py index dd81273af78b..14a2ad464c37 100644 --- a/translate/synth.py +++ b/translate/synth.py @@ -19,7 +19,7 @@ gapic = gcp.GAPICGenerator() common = gcp.CommonTemplates() -versions = ["v3beta1"] +versions = ["v3beta1", "v3"] excludes = [ "setup.py", @@ -41,12 +41,15 @@ s.move(library / "tests") s.move(library / f"docs/gapic/{version}") -# translation -> translate -s.replace( - "google/**/translation_service_pb2_grpc.py", - "google.cloud.translation_v3beta1.proto", - "google.cloud.translate_v3beta1.proto", -) + # translation -> translate + s.replace( + 
"google/**/translation_service_pb2_grpc.py", + f"google.cloud.translation_{version}.proto", + f"google.cloud.translate_{version}.proto", + ) + +# Use the highest version library to generate documentation import alias. +s.move(library / "google/cloud/translate.py") s.replace( "google/cloud/**/translation_service_pb2.py", diff --git a/translate/tests/system.py b/translate/tests/system.py index c586411b6111..b92ead125acf 100644 --- a/translate/tests/system.py +++ b/translate/tests/system.py @@ -16,7 +16,7 @@ import unittest -from google.cloud import translate +from google.cloud import translate_v2 class Config(object): @@ -30,7 +30,7 @@ class Config(object): def setUpModule(): - Config.CLIENT = translate.Client() + Config.CLIENT = translate_v2.Client() class TestTranslate(unittest.TestCase): diff --git a/translate/tests/system/test_vpcsc.py b/translate/tests/system/test_vpcsc.py new file mode 100644 index 000000000000..427d0be6757d --- /dev/null +++ b/translate/tests/system/test_vpcsc.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Unit tests for VPC-SC.""" + +import os +import pytest + +from google.api_core import exceptions +from google.cloud import translate_v3beta1 + + +IS_INSIDE_VPCSC = "GOOGLE_CLOUD_TESTS_IN_VPCSC" in os.environ +# If IS_INSIDE_VPCSC is set, these environment variables should also be set +if IS_INSIDE_VPCSC: + PROJECT_INSIDE = os.environ["PROJECT_ID"] + PROJECT_OUTSIDE = os.environ["GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT"] + + +class TestVPCServiceControl(object): + @classmethod + def setup(self): + self._client = translate_v3beta1.TranslationServiceClient() + self._parent_inside = self._client.location_path(PROJECT_INSIDE, "us-central1") + self._parent_outside = self._client.location_path( + PROJECT_OUTSIDE, "us-central1" + ) + + def make_glossary_name(project_id): + return "projects/{0}/locations/us-central1/glossaries/fake_glossary".format( + project_id + ) + + self._glossary_name_inside = make_glossary_name(PROJECT_INSIDE) + self._glossary_name_outside = make_glossary_name(PROJECT_OUTSIDE) + + @staticmethod + def _is_rejected(call): + try: + responses = call() + print("responses: ", responses) + except exceptions.PermissionDenied as e: + print("PermissionDenied Exception: ", e) + return e.message == "Request is prohibited by organization's policy" + except Exception as e: + print("Other Exception: ", e) + pass + return False + + @staticmethod + def _do_test(delayed_inside, delayed_outside): + assert TestVPCServiceControl._is_rejected(delayed_outside) + assert not (TestVPCServiceControl._is_rejected(delayed_inside)) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. 
To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_create_glossary(self): + def make_glossary(project_id): + return { + "name": "projects/{0}/locations/us-central1/glossaries/fake_glossary".format( + project_id + ), + "language_codes_set": {"language_codes": ["en", "ja"]}, + "input_config": { + "gcs_source": {"input_uri": "gs://fake-bucket/fake_glossary.csv"} + }, + } + + glossary_inside = make_glossary(PROJECT_INSIDE) + + def delayed_inside(): + return self._client.create_glossary(self._parent_inside, glossary_inside) + + glossary_outside = make_glossary(PROJECT_OUTSIDE) + + def delayed_outside(): + return self._client.create_glossary(self._parent_outside, glossary_outside) + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_list_glossaries(self): + # list_glossaries() returns an GRPCIterator instance, and we need to actually iterate through it + # by calling _next_page() to get real response. + def delayed_inside(): + return self._client.list_glossaries(self._parent_inside)._next_page() + + def delayed_outside(): + return self._client.list_glossaries(self._parent_outside)._next_page() + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_get_glossary(self): + def delayed_inside(): + return self._client.get_glossary(self._glossary_name_inside) + + def delayed_outside(): + return self._client.get_glossary(self._glossary_name_outside) + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. 
To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_delete_glossary(self): + def delayed_inside(): + return self._client.delete_glossary(self._glossary_name_inside) + + def delayed_outside(): + return self._client.delete_glossary(self._glossary_name_outside) + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) + + @pytest.mark.skipif( + not IS_INSIDE_VPCSC, + reason="This test must be run in VPCSC. To enable this test, set the environment variable GOOGLE_CLOUD_TESTS_IN_VPCSC to True", + ) + def test_batch_translate_text(self): + source_language_code = "en" + target_language_codes = ["es"] + input_configs = [{"gcs_source": {"input_uri": "gs://fake-bucket/*"}}] + output_config = { + "gcs_destination": {"output_uri_prefix": "gs://fake-bucket/output/"} + } + + def delayed_inside(): + return self._client.batch_translate_text( + self._parent_inside, + source_language_code, + target_language_codes, + input_configs, + output_config, + ) + + def delayed_outside(): + return self._client.batch_translate_text( + self._parent_outside, + source_language_code, + target_language_codes, + input_configs, + output_config, + ) + + TestVPCServiceControl._do_test(delayed_inside, delayed_outside) diff --git a/translate/tests/unit/gapic/v3/test_translation_service_client_v3.py b/translate/tests/unit/gapic/v3/test_translation_service_client_v3.py new file mode 100644 index 000000000000..16180aace4ad --- /dev/null +++ b/translate/tests/unit/gapic/v3/test_translation_service_client_v3.py @@ -0,0 +1,470 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unit tests.""" + +import mock +import pytest + +from google.rpc import status_pb2 + +from google.cloud import translate_v3 +from google.cloud.translate_v3.proto import translation_service_pb2 +from google.longrunning import operations_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, method, request_serializer=None, response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestTranslationServiceClient(object): + def test_translate_text(self): + # Setup Expected Response + expected_response = {} + expected_response = translation_service_pb2.TranslateTextResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client 
= translate_v3.TranslationServiceClient() + + # Setup Request + contents = [] + target_language_code = "targetLanguageCode1323228230" + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.translate_text(contents, target_language_code, parent) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.TranslateTextRequest( + contents=contents, target_language_code=target_language_code, parent=parent + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_translate_text_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + contents = [] + target_language_code = "targetLanguageCode1323228230" + parent = client.location_path("[PROJECT]", "[LOCATION]") + + with pytest.raises(CustomException): + client.translate_text(contents, target_language_code, parent) + + def test_detect_language(self): + # Setup Expected Response + expected_response = {} + expected_response = translation_service_pb2.DetectLanguageResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.detect_language(parent) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.DetectLanguageRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def 
test_detect_language_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + with pytest.raises(CustomException): + client.detect_language(parent) + + def test_get_supported_languages(self): + # Setup Expected Response + expected_response = {} + expected_response = translation_service_pb2.SupportedLanguages( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.get_supported_languages(parent) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.GetSupportedLanguagesRequest( + parent=parent + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_supported_languages_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + with pytest.raises(CustomException): + client.get_supported_languages(parent) + + def test_batch_translate_text(self): + # Setup Expected Response + total_characters = 1368640955 + translated_characters = 1337326221 + failed_characters = 1723028396 + expected_response = { 
+ "total_characters": total_characters, + "translated_characters": translated_characters, + "failed_characters": failed_characters, + } + expected_response = translation_service_pb2.BatchTranslateResponse( + **expected_response + ) + operation = operations_pb2.Operation( + name="operations/test_batch_translate_text", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + source_language_code = "sourceLanguageCode1687263568" + target_language_codes = [] + input_configs = [] + output_config = {} + + response = client.batch_translate_text( + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, + ) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.BatchTranslateTextRequest( + parent=parent, + source_language_code=source_language_code, + target_language_codes=target_language_codes, + input_configs=input_configs, + output_config=output_config, + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_batch_translate_text_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_batch_translate_text_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = 
client.location_path("[PROJECT]", "[LOCATION]") + source_language_code = "sourceLanguageCode1687263568" + target_language_codes = [] + input_configs = [] + output_config = {} + + response = client.batch_translate_text( + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, + ) + exception = response.exception() + assert exception.errors[0] == error + + def test_create_glossary(self): + # Setup Expected Response + name = "name3373707" + entry_count = 811131134 + expected_response = {"name": name, "entry_count": entry_count} + expected_response = translation_service_pb2.Glossary(**expected_response) + operation = operations_pb2.Operation( + name="operations/test_create_glossary", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + glossary = {} + + response = client.create_glossary(parent, glossary) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.CreateGlossaryRequest( + parent=parent, glossary=glossary + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_glossary_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_create_glossary_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() 
+ + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + glossary = {} + + response = client.create_glossary(parent, glossary) + exception = response.exception() + assert exception.errors[0] == error + + def test_list_glossaries(self): + # Setup Expected Response + next_page_token = "" + glossaries_element = {} + glossaries = [glossaries_element] + expected_response = { + "next_page_token": next_page_token, + "glossaries": glossaries, + } + expected_response = translation_service_pb2.ListGlossariesResponse( + **expected_response + ) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_glossaries(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.glossaries[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.ListGlossariesRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_glossaries_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_glossaries(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_glossary(self): + # Setup Expected Response + name_2 = "name2-1052831874" + entry_count = 811131134 + expected_response = {"name": name_2, "entry_count": entry_count} + 
expected_response = translation_service_pb2.Glossary(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + name = client.glossary_path("[PROJECT]", "[LOCATION]", "[GLOSSARY]") + + response = client.get_glossary(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.GetGlossaryRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_glossary_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup request + name = client.glossary_path("[PROJECT]", "[LOCATION]", "[GLOSSARY]") + + with pytest.raises(CustomException): + client.get_glossary(name) + + def test_delete_glossary(self): + # Setup Expected Response + name_2 = "name2-1052831874" + expected_response = {"name": name_2} + expected_response = translation_service_pb2.DeleteGlossaryResponse( + **expected_response + ) + operation = operations_pb2.Operation( + name="operations/test_delete_glossary", done=True + ) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + name = client.glossary_path("[PROJECT]", "[LOCATION]", "[GLOSSARY]") + + response = client.delete_glossary(name) + result = response.result() + 
assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = translation_service_pb2.DeleteGlossaryRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_glossary_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name="operations/test_delete_glossary_exception", done=True + ) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = translate_v3.TranslationServiceClient() + + # Setup Request + name = client.glossary_path("[PROJECT]", "[LOCATION]", "[GLOSSARY]") + + response = client.delete_glossary(name) + exception = response.exception() + assert exception.errors[0] == error diff --git a/translate/tests/unit/gapic/v3beta1/test_translation_service_client_v3beta1.py b/translate/tests/unit/gapic/v3beta1/test_translation_service_client_v3beta1.py index c4d45f92c69f..6bd32d82c20a 100644 --- a/translate/tests/unit/gapic/v3beta1/test_translation_service_client_v3beta1.py +++ b/translate/tests/unit/gapic/v3beta1/test_translation_service_client_v3beta1.py @@ -80,13 +80,14 @@ def test_translate_text(self): # Setup Request contents = [] target_language_code = "targetLanguageCode1323228230" + parent = client.location_path("[PROJECT]", "[LOCATION]") - response = client.translate_text(contents, target_language_code) + response = client.translate_text(contents, target_language_code, parent) assert expected_response == response assert len(channel.requests) == 1 expected_request = translation_service_pb2.TranslateTextRequest( - contents=contents, target_language_code=target_language_code + contents=contents, target_language_code=target_language_code, parent=parent ) actual_request = channel.requests[0][1] assert 
expected_request == actual_request @@ -102,9 +103,10 @@ def test_translate_text_exception(self): # Setup request contents = [] target_language_code = "targetLanguageCode1323228230" + parent = client.location_path("[PROJECT]", "[LOCATION]") with pytest.raises(CustomException): - client.translate_text(contents, target_language_code) + client.translate_text(contents, target_language_code, parent) def test_detect_language(self): # Setup Expected Response @@ -120,11 +122,14 @@ def test_detect_language(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() - response = client.detect_language() + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.detect_language(parent) assert expected_response == response assert len(channel.requests) == 1 - expected_request = translation_service_pb2.DetectLanguageRequest() + expected_request = translation_service_pb2.DetectLanguageRequest(parent=parent) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -136,8 +141,11 @@ def test_detect_language_exception(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + with pytest.raises(CustomException): - client.detect_language() + client.detect_language(parent) def test_get_supported_languages(self): # Setup Expected Response @@ -153,11 +161,16 @@ def test_get_supported_languages(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() - response = client.get_supported_languages() + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + response = client.get_supported_languages(parent) assert expected_response == response assert len(channel.requests) == 1 - expected_request = translation_service_pb2.GetSupportedLanguagesRequest() + expected_request = 
translation_service_pb2.GetSupportedLanguagesRequest( + parent=parent + ) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -169,8 +182,11 @@ def test_get_supported_languages_exception(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + with pytest.raises(CustomException): - client.get_supported_languages() + client.get_supported_languages(parent) def test_batch_translate_text(self): # Setup Expected Response @@ -198,19 +214,25 @@ def test_batch_translate_text(self): client = translate_v3beta1.TranslationServiceClient() # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") source_language_code = "sourceLanguageCode1687263568" target_language_codes = [] input_configs = [] output_config = {} response = client.batch_translate_text( - source_language_code, target_language_codes, input_configs, output_config + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, ) result = response.result() assert expected_response == result assert len(channel.requests) == 1 expected_request = translation_service_pb2.BatchTranslateTextRequest( + parent=parent, source_language_code=source_language_code, target_language_codes=target_language_codes, input_configs=input_configs, @@ -235,13 +257,18 @@ def test_batch_translate_text_exception(self): client = translate_v3beta1.TranslationServiceClient() # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") source_language_code = "sourceLanguageCode1687263568" target_language_codes = [] input_configs = [] output_config = {} response = client.batch_translate_text( - source_language_code, target_language_codes, input_configs, output_config + parent, + source_language_code, + target_language_codes, + input_configs, + output_config, ) exception = response.exception() assert exception.errors[0] == error @@ -322,14 
+349,17 @@ def test_list_glossaries(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() - paged_list_response = client.list_glossaries() + # Setup Request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_glossaries(parent) resources = list(paged_list_response) assert len(resources) == 1 assert expected_response.glossaries[0] == resources[0] assert len(channel.requests) == 1 - expected_request = translation_service_pb2.ListGlossariesRequest() + expected_request = translation_service_pb2.ListGlossariesRequest(parent=parent) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -340,7 +370,10 @@ def test_list_glossaries_exception(self): create_channel.return_value = channel client = translate_v3beta1.TranslationServiceClient() - paged_list_response = client.list_glossaries() + # Setup request + parent = client.location_path("[PROJECT]", "[LOCATION]") + + paged_list_response = client.list_glossaries(parent) with pytest.raises(CustomException): list(paged_list_response) diff --git a/translate/tests/unit/test__http.py b/translate/tests/unit/v2/test__http.py similarity index 100% rename from translate/tests/unit/test__http.py rename to translate/tests/unit/v2/test__http.py diff --git a/translate/tests/unit/test_client.py b/translate/tests/unit/v2/test_client.py similarity index 99% rename from translate/tests/unit/test_client.py rename to translate/tests/unit/v2/test_client.py index dfbc5affaa73..2c0f72b1a1e5 100644 --- a/translate/tests/unit/test_client.py +++ b/translate/tests/unit/v2/test_client.py @@ -18,7 +18,7 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): - from google.cloud.translate import Client + from google.cloud.translate_v2 import Client return Client diff --git a/videointelligence/docs/conf.py b/videointelligence/docs/conf.py index 454c487e70fb..ceaf0b34c248 100644 --- a/videointelligence/docs/conf.py 
+++ b/videointelligence/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/videointelligence/docs/gapic/v1beta1/api.rst b/videointelligence/docs/gapic/v1beta1/api.rst deleted file mode 100644 index 2ec043d68788..000000000000 --- a/videointelligence/docs/gapic/v1beta1/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Google Cloud Video Intelligence API -============================================== - -.. automodule:: google.cloud.videointelligence_v1beta1 - :members: - :inherited-members: \ No newline at end of file diff --git a/videointelligence/docs/gapic/v1beta1/types.rst b/videointelligence/docs/gapic/v1beta1/types.rst deleted file mode 100644 index 00374b5af4d5..000000000000 --- a/videointelligence/docs/gapic/v1beta1/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Google Cloud Video Intelligence API Client -==================================================== - -.. 
automodule:: google.cloud.videointelligence_v1beta1.types - :members: \ No newline at end of file diff --git a/videointelligence/docs/index.rst b/videointelligence/docs/index.rst index 868afa75297c..5a888812a870 100644 --- a/videointelligence/docs/index.rst +++ b/videointelligence/docs/index.rst @@ -26,7 +26,7 @@ An API and type reference is provided for this beta: gapic/v1p3beta1/api gapic/v1p3beta1/types -The previous beta releases, spelled ``v1p2beta1``, ``v1p1beta1``, ``v1beta1``, and +The previous beta releases, spelled ``v1p2beta1``, ``v1p1beta1``, and ``v1beta2``, are provided to continue to support code previously written against them. In order to use ththem, you will want to import from e.g. ``google.cloud.videointelligence_v1beta2`` in lieu of @@ -41,8 +41,6 @@ An API and type reference is provided the these betas also: gapic/v1p2beta1/types gapic/v1p1beta1/api gapic/v1p1beta1/types - gapic/v1beta1/api - gapic/v1beta1/types gapic/v1beta2/api gapic/v1beta2/types diff --git a/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client.py b/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client.py index a9058fb38c33..86210ceaad2e 100644 --- a/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client.py +++ b/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client.py @@ -191,9 +191,9 @@ def __init__( # Service calls def annotate_video( self, + features, input_uri=None, input_content=None, - features=None, video_context=None, output_uri=None, location_id=None, @@ -213,11 +213,11 @@ def annotate_video( >>> >>> client = videointelligence_v1.VideoIntelligenceServiceClient() >>> - >>> input_uri = 'gs://cloud-samples-data/video/cat.mp4' >>> features_element = enums.Feature.LABEL_DETECTION >>> features = [features_element] + >>> input_uri = 'gs://cloud-samples-data/video/cat.mp4' >>> - >>> response = 
client.annotate_video(input_uri=input_uri, features=features) + >>> response = client.annotate_video(features, input_uri=input_uri) >>> >>> def callback(operation_future): ... # Handle result. @@ -229,6 +229,7 @@ def annotate_video( >>> metadata = response.metadata() Args: + features (list[~google.cloud.videointelligence_v1.types.Feature]): Required. Requested video annotation features. input_uri (str): Input video location. Currently, only `Google Cloud Storage `__ URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id`` @@ -241,19 +242,18 @@ def annotate_video( request as ``input_content``. If set, ``input_content`` should be unset. input_content (bytes): The video data bytes. If unset, the input video(s) should be specified via ``input_uri``. If set, ``input_uri`` should be unset. - features (list[~google.cloud.videointelligence_v1.types.Feature]): Requested video annotation features. video_context (Union[dict, ~google.cloud.videointelligence_v1.types.VideoContext]): Additional video context and/or feature-specific parameters. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.videointelligence_v1.types.VideoContext` - output_uri (str): Optional location where the output (in JSON format) should be stored. + output_uri (str): Optional. Location where the output (in JSON format) should be stored. Currently, only `Google Cloud Storage `__ URIs are supported, which must be specified in the following format: ``gs://bucket-id/object-id`` (other URI formats return ``google.rpc.Code.INVALID_ARGUMENT``). For more information, see `Request URIs `__. - location_id (str): Optional cloud region where annotation should take place. Supported + location_id (str): Optional. Cloud region where annotation should take place. Supported cloud regions: ``us-east1``, ``us-west1``, ``europe-west1``, ``asia-east1``. If no region is specified, a region will be determined based on video file location. 
@@ -288,9 +288,9 @@ def annotate_video( ) request = video_intelligence_pb2.AnnotateVideoRequest( + features=features, input_uri=input_uri, input_content=input_content, - features=features, video_context=video_context, output_uri=output_uri, location_id=location_id, diff --git a/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client_config.py b/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client_config.py index b9eef5e2a6a7..70a9b8812235 100644 --- a/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client_config.py +++ b/videointelligence/google/cloud/videointelligence_v1/gapic/video_intelligence_service_client_config.py @@ -7,19 +7,19 @@ }, "retry_params": { "default": { - "initial_retry_delay_millis": 1000, - "retry_delay_multiplier": 2.5, - "max_retry_delay_millis": 120000, - "initial_rpc_timeout_millis": 120000, + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 120000, + "max_rpc_timeout_millis": 20000, "total_timeout_millis": 600000, } }, "methods": { "AnnotateVideo": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", "retry_params_name": "default", } }, diff --git a/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence.proto b/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence.proto index 4c7a0ad131a2..6611e9eb2c61 100644 --- a/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence.proto +++ b/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence.proto @@ -19,6 +19,7 @@ package google.cloud.videointelligence.v1; import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; 
import "google/longrunning/operations.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; @@ -35,19 +36,22 @@ option ruby_package = "Google::Cloud::VideoIntelligence::V1"; // Service that implements Google Cloud Video Intelligence API. service VideoIntelligenceService { option (google.api.default_host) = "videointelligence.googleapis.com"; - option (google.api.oauth_scopes) = - "https://www.googleapis.com/auth/cloud-platform"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; // Performs asynchronous video annotation. Progress and results can be // retrieved through the `google.longrunning.Operations` interface. // `Operation.metadata` contains `AnnotateVideoProgress` (progress). // `Operation.response` contains `AnnotateVideoResponse` (results). - rpc AnnotateVideo(AnnotateVideoRequest) - returns (google.longrunning.Operation) { + rpc AnnotateVideo(AnnotateVideoRequest) returns (google.longrunning.Operation) { option (google.api.http) = { post: "/v1/videos:annotate" body: "*" }; + option (google.api.method_signature) = "input_uri,features"; + option (google.longrunning.operation_info) = { + response_type: "AnnotateVideoResponse" + metadata_type: "AnnotateVideoProgress" + }; } } @@ -57,10 +61,10 @@ message AnnotateVideoRequest { // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are // supported, which must be specified in the following format: // `gs://bucket-id/object-id` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For - // more information, see [Request URIs](/storage/docs/reference-uris). A video - // URI may include wildcards in `object-id`, and thus identify multiple - // videos. Supported wildcards: '*' to match 0 or more characters; + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see + // [Request URIs](/storage/docs/reference-uris). 
+ // A video URI may include wildcards in `object-id`, and thus identify + // multiple videos. Supported wildcards: '*' to match 0 or more characters; // '?' to match 1 character. If unset, the input video should be embedded // in the request as `input_content`. If set, `input_content` should be unset. string input_uri = 1; @@ -70,24 +74,24 @@ message AnnotateVideoRequest { // If set, `input_uri` should be unset. bytes input_content = 6; - // Requested video annotation features. - repeated Feature features = 2; + // Required. Requested video annotation features. + repeated Feature features = 2 [(google.api.field_behavior) = REQUIRED]; // Additional video context and/or feature-specific parameters. VideoContext video_context = 3; - // Optional location where the output (in JSON format) should be stored. + // Optional. Location where the output (in JSON format) should be stored. // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) // URIs are supported, which must be specified in the following format: // `gs://bucket-id/object-id` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For - // more information, see [Request URIs](/storage/docs/reference-uris). - string output_uri = 4; + // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For more information, see + // [Request URIs](/storage/docs/reference-uris). + string output_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - // Optional cloud region where annotation should take place. Supported cloud + // Optional. Cloud region where annotation should take place. Supported cloud // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region // is specified, a region will be determined based on video file location. - string location_id = 5; + string location_id = 5 [(google.api.field_behavior) = OPTIONAL]; } // Video context and/or feature-specific parameters. 
@@ -119,6 +123,69 @@ message VideoContext { ObjectTrackingConfig object_tracking_config = 13; } +// Video annotation feature. +enum Feature { + // Unspecified. + FEATURE_UNSPECIFIED = 0; + + // Label detection. Detect objects, such as dog or flower. + LABEL_DETECTION = 1; + + // Shot change detection. + SHOT_CHANGE_DETECTION = 2; + + // Explicit content detection. + EXPLICIT_CONTENT_DETECTION = 3; + + // Human face detection and tracking. + FACE_DETECTION = 4; + + // Speech transcription. + SPEECH_TRANSCRIPTION = 6; + + // OCR text detection and tracking. + TEXT_DETECTION = 7; + + // Object detection and tracking. + OBJECT_TRACKING = 9; +} + +// Label detection mode. +enum LabelDetectionMode { + // Unspecified. + LABEL_DETECTION_MODE_UNSPECIFIED = 0; + + // Detect shot-level labels. + SHOT_MODE = 1; + + // Detect frame-level labels. + FRAME_MODE = 2; + + // Detect both shot-level and frame-level labels. + SHOT_AND_FRAME_MODE = 3; +} + +// Bucketized representation of likelihood. +enum Likelihood { + // Unspecified likelihood. + LIKELIHOOD_UNSPECIFIED = 0; + + // Very unlikely. + VERY_UNLIKELY = 1; + + // Unlikely. + UNLIKELY = 2; + + // Possible. + POSSIBLE = 3; + + // Likely. + LIKELY = 4; + + // Very likely. + VERY_LIKELY = 5; +} + // Config for LABEL_DETECTION. message LabelDetectionConfig { // What labels should be detected with LABEL_DETECTION, in addition to @@ -161,9 +228,9 @@ message ShotChangeDetectionConfig { string model = 1; } -// Config for EXPLICIT_CONTENT_DETECTION. -message ExplicitContentDetectionConfig { - // Model to use for explicit content detection. +// Config for OBJECT_TRACKING. +message ObjectTrackingConfig { + // Model to use for object tracking. // Supported values: "builtin/stable" (the default if unset) and // "builtin/latest". string model = 1; @@ -180,9 +247,9 @@ message FaceDetectionConfig { bool include_bounding_boxes = 2; } -// Config for OBJECT_TRACKING. -message ObjectTrackingConfig { - // Model to use for object tracking. 
+// Config for EXPLICIT_CONTENT_DETECTION. +message ExplicitContentDetectionConfig { + // Model to use for explicit content detection. // Supported values: "builtin/stable" (the default if unset) and // "builtin/latest". string model = 1; @@ -344,8 +411,11 @@ message VideoAnnotationResults { repeated LabelAnnotation segment_label_annotations = 2; // Presence label annotations on video level or user specified segment level. - // There is exactly one element for each unique label. This will eventually - // get publicly exposed and the restriction will be removed. + // There is exactly one element for each unique label. Compared to the + // existing topical `segment_label_annotations`, this field presents more + // fine-grained, segment-level labels detected in video content and is made + // available only when the client sets `LabelDetectionConfig.model` to + // "builtin/latest" in the request. repeated LabelAnnotation segment_presence_label_annotations = 23; // Topical label annotations on shot level. @@ -353,8 +423,10 @@ message VideoAnnotationResults { repeated LabelAnnotation shot_label_annotations = 3; // Presence label annotations on shot level. There is exactly one element for - // each unique label. This will eventually get publicly exposed and the - // restriction will be removed. + // each unique label. Compared to the existing topical + // `shot_label_annotations`, this field presents more fine-grained, shot-level + // labels detected in video content and is made available only when the client + // sets `LabelDetectionConfig.model` to "builtin/latest" in the request. repeated LabelAnnotation shot_presence_label_annotations = 24; // Label annotations on frame level. @@ -429,72 +501,71 @@ message AnnotateVideoProgress { // Config for SPEECH_TRANSCRIPTION. message SpeechTranscriptionConfig { - // *Required* The language of the supplied audio as a + // Required. 
*Required* The language of the supplied audio as a // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag. // Example: "en-US". // See [Language Support](https://cloud.google.com/speech/docs/languages) // for a list of the currently supported language codes. - string language_code = 1; + string language_code = 1 [(google.api.field_behavior) = REQUIRED]; - // *Optional* Maximum number of recognition hypotheses to be returned. + // Optional. Maximum number of recognition hypotheses to be returned. // Specifically, the maximum number of `SpeechRecognitionAlternative` messages // within each `SpeechTranscription`. The server may return fewer than // `max_alternatives`. Valid values are `0`-`30`. A value of `0` or `1` will // return a maximum of one. If omitted, will return a maximum of one. - int32 max_alternatives = 2; + int32 max_alternatives = 2 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If set to `true`, the server will attempt to filter out + // Optional. If set to `true`, the server will attempt to filter out // profanities, replacing all but the initial character in each filtered word // with asterisks, e.g. "f***". If set to `false` or omitted, profanities // won't be filtered out. - bool filter_profanity = 3; + bool filter_profanity = 3 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* A means to provide context to assist the speech recognition. - repeated SpeechContext speech_contexts = 4; + // Optional. A means to provide context to assist the speech recognition. + repeated SpeechContext speech_contexts = 4 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If 'true', adds punctuation to recognition result hypotheses. + // Optional. If 'true', adds punctuation to recognition result hypotheses. // This feature is only available in select languages. Setting this for // requests in other languages has no effect at all. The default 'false' value // does not add punctuation to result hypotheses. 
NOTE: "This is currently // offered as an experimental service, complimentary to all users. In the // future this may be exclusively available as a premium feature." - bool enable_automatic_punctuation = 5; + bool enable_automatic_punctuation = 5 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* For file formats, such as MXF or MKV, supporting multiple audio + // Optional. For file formats, such as MXF or MKV, supporting multiple audio // tracks, specify up to two tracks. Default: track 0. - repeated int32 audio_tracks = 6; + repeated int32 audio_tracks = 6 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If 'true', enables speaker detection for each recognized word in + // Optional. If 'true', enables speaker detection for each recognized word in // the top alternative of the recognition result using a speaker_tag provided // in the WordInfo. // Note: When this is true, we send all the words from the beginning of the // audio for the top alternative in every consecutive responses. // This is done in order to improve our speaker tags as our models learn to // identify the speakers in the conversation over time. - bool enable_speaker_diarization = 7; + bool enable_speaker_diarization = 7 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* - // If set, specifies the estimated number of speakers in the conversation. + // Optional. If set, specifies the estimated number of speakers in the conversation. // If not set, defaults to '2'. // Ignored unless enable_speaker_diarization is set to true. - int32 diarization_speaker_count = 8; + int32 diarization_speaker_count = 8 [(google.api.field_behavior) = OPTIONAL]; - // *Optional* If `true`, the top result includes a list of words and the + // Optional. If `true`, the top result includes a list of words and the // confidence for those words. If `false`, no word-level confidence // information is returned. The default is `false`. 
- bool enable_word_confidence = 9; + bool enable_word_confidence = 9 [(google.api.field_behavior) = OPTIONAL]; } // Provides "hints" to the speech recognizer to favor specific words and phrases // in the results. message SpeechContext { - // *Optional* A list of strings containing words and phrases "hints" so that + // Optional. A list of strings containing words and phrases "hints" so that // the speech recognition is more likely to recognize them. This can be used // to improve the accuracy for specific words and phrases, for example, if // specific commands are typically spoken by the user. This can also be used // to add additional words to the vocabulary of the recognizer. See // [usage limits](https://cloud.google.com/speech/limits#content). - repeated string phrases = 1; + repeated string phrases = 1 [(google.api.field_behavior) = OPTIONAL]; } // A speech recognition result corresponding to a portion of the audio. @@ -505,11 +576,10 @@ message SpeechTranscription { // ranked by the recognizer. repeated SpeechRecognitionAlternative alternatives = 1; - // Output only. The - // [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of the - // language in this result. This language code was detected to have the most - // likelihood of being spoken in the audio. - string language_code = 2; + // Output only. The [BCP-47](https://www.rfc-editor.org/rfc/bcp/bcp47.txt) language tag of + // the language in this result. This language code was detected to have the + // most likelihood of being spoken in the audio. + string language_code = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Alternative hypotheses (a.k.a. n-best list). @@ -523,12 +593,12 @@ message SpeechRecognitionAlternative { // This field is not guaranteed to be accurate and users should not rely on it // to be always provided. // The default of 0.0 is a sentinel value indicating `confidence` was not set. 
- float confidence = 2; + float confidence = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A list of word-specific information for each recognized word. // Note: When `enable_speaker_diarization` is true, you will see all the words // from the beginning of the audio. - repeated WordInfo words = 3; + repeated WordInfo words = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Word-specific information for recognized words. Word information is only @@ -556,13 +626,13 @@ message WordInfo { // This field is not guaranteed to be accurate and users should not rely on it // to be always provided. // The default of 0.0 is a sentinel value indicating `confidence` was not set. - float confidence = 4; + float confidence = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. A distinct integer value is assigned for every speaker within // the audio. This field specifies which one of those speakers was detected to // have spoken this word. Value ranges from 1 up to diarization_speaker_count, // and is only set if speaker diarization is enabled. - int32 speaker_tag = 5; + int32 speaker_tag = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A vertex represents a 2D point in the image. @@ -673,66 +743,3 @@ message ObjectTrackingAnnotation { // Streaming mode: it can only be one ObjectTrackingFrame message in frames. repeated ObjectTrackingFrame frames = 2; } - -// Video annotation feature. -enum Feature { - // Unspecified. - FEATURE_UNSPECIFIED = 0; - - // Label detection. Detect objects, such as dog or flower. - LABEL_DETECTION = 1; - - // Shot change detection. - SHOT_CHANGE_DETECTION = 2; - - // Explicit content detection. - EXPLICIT_CONTENT_DETECTION = 3; - - // Human face detection and tracking. - FACE_DETECTION = 4; - - // Speech transcription. - SPEECH_TRANSCRIPTION = 6; - - // OCR text detection and tracking. - TEXT_DETECTION = 7; - - // Object detection and tracking. - OBJECT_TRACKING = 9; -} - -// Label detection mode. 
-enum LabelDetectionMode { - // Unspecified. - LABEL_DETECTION_MODE_UNSPECIFIED = 0; - - // Detect shot-level labels. - SHOT_MODE = 1; - - // Detect frame-level labels. - FRAME_MODE = 2; - - // Detect both shot-level and frame-level labels. - SHOT_AND_FRAME_MODE = 3; -} - -// Bucketized representation of likelihood. -enum Likelihood { - // Unspecified likelihood. - LIKELIHOOD_UNSPECIFIED = 0; - - // Very unlikely. - VERY_UNLIKELY = 1; - - // Unlikely. - UNLIKELY = 2; - - // Possible. - POSSIBLE = 3; - - // Likely. - LIKELY = 4; - - // Very likely. - VERY_LIKELY = 5; -} diff --git a/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence_pb2.py b/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence_pb2.py index 755f13437776..869eeae436ce 100644 --- a/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence_pb2.py +++ b/videointelligence/google/cloud/videointelligence_v1/proto/video_intelligence_pb2.py @@ -18,6 +18,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.longrunning import ( operations_pb2 as google_dot_longrunning_dot_operations__pb2, ) @@ -34,11 +35,12 @@ "\n%com.google.cloud.videointelligence.v1B\035VideoIntelligenceServiceProtoP\001ZRgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1;videointelligence\252\002!Google.Cloud.VideoIntelligence.V1\312\002!Google\\Cloud\\VideoIntelligence\\V1\352\002$Google::Cloud::VideoIntelligence::V1" ), serialized_pb=_b( - 
'\n@google/cloud/videointelligence_v1/proto/video_intelligence.proto\x12!google.cloud.videointelligence.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xef\x01\n\x14\x41nnotateVideoRequest\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x15\n\rinput_content\x18\x06 \x01(\x0c\x12<\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0e\x32*.google.cloud.videointelligence.v1.Feature\x12\x46\n\rvideo_context\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoContext\x12\x12\n\noutput_uri\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"\xe6\x05\n\x0cVideoContext\x12\x41\n\x08segments\x18\x01 \x03(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12W\n\x16label_detection_config\x18\x02 \x01(\x0b\x32\x37.google.cloud.videointelligence.v1.LabelDetectionConfig\x12\x62\n\x1cshot_change_detection_config\x18\x03 \x01(\x0b\x32<.google.cloud.videointelligence.v1.ShotChangeDetectionConfig\x12l\n!explicit_content_detection_config\x18\x04 \x01(\x0b\x32\x41.google.cloud.videointelligence.v1.ExplicitContentDetectionConfig\x12U\n\x15\x66\x61\x63\x65_detection_config\x18\x05 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1.FaceDetectionConfig\x12\x61\n\x1bspeech_transcription_config\x18\x06 \x01(\x0b\x32<.google.cloud.videointelligence.v1.SpeechTranscriptionConfig\x12U\n\x15text_detection_config\x18\x08 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1.TextDetectionConfig\x12W\n\x16object_tracking_config\x18\r \x01(\x0b\x32\x37.google.cloud.videointelligence.v1.ObjectTrackingConfig"\xdd\x01\n\x14LabelDetectionConfig\x12S\n\x14label_detection_mode\x18\x01 \x01(\x0e\x32\x35.google.cloud.videointelligence.v1.LabelDetectionMode\x12\x19\n\x11stationary_camera\x18\x02 \x01(\x08\x12\r\n\x05model\x18\x03 \x01(\t\x12"\n\x1a\x66rame_confidence_threshold\x18\x04 \x01(\x02\x12"\n\x1avideo_confidence_threshold\x18\x05 
\x01(\x02"*\n\x19ShotChangeDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"/\n\x1e\x45xplicitContentDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"D\n\x13\x46\x61\x63\x65\x44\x65tectionConfig\x12\r\n\x05model\x18\x01 \x01(\t\x12\x1e\n\x16include_bounding_boxes\x18\x02 \x01(\x08"%\n\x14ObjectTrackingConfig\x12\r\n\x05model\x18\x01 \x01(\t"<\n\x13TextDetectionConfig\x12\x16\n\x0elanguage_hints\x18\x01 \x03(\t\x12\r\n\x05model\x18\x02 \x01(\t"x\n\x0cVideoSegment\x12\x34\n\x11start_time_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0f\x65nd_time_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"d\n\x0cLabelSegment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02"P\n\nLabelFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x12\n\nconfidence\x18\x02 \x01(\x02"G\n\x06\x45ntity\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x15\n\rlanguage_code\x18\x03 \x01(\t"\x94\x02\n\x0fLabelAnnotation\x12\x39\n\x06\x65ntity\x18\x01 \x01(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x44\n\x11\x63\x61tegory_entities\x18\x02 \x03(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x41\n\x08segments\x18\x03 \x03(\x0b\x32/.google.cloud.videointelligence.v1.LabelSegment\x12=\n\x06\x66rames\x18\x04 \x03(\x0b\x32-.google.cloud.videointelligence.v1.LabelFrame"\x95\x01\n\x14\x45xplicitContentFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12M\n\x16pornography_likelihood\x18\x02 \x01(\x0e\x32-.google.cloud.videointelligence.v1.Likelihood"d\n\x19\x45xplicitContentAnnotation\x12G\n\x06\x66rames\x18\x01 \x03(\x0b\x32\x37.google.cloud.videointelligence.v1.ExplicitContentFrame"Q\n\x15NormalizedBoundingBox\x12\x0c\n\x04left\x18\x01 \x01(\x02\x12\x0b\n\x03top\x18\x02 \x01(\x02\x12\r\n\x05right\x18\x03 \x01(\x02\x12\x0e\n\x06\x62ottom\x18\x04 
\x01(\x02"O\n\x0b\x46\x61\x63\x65Segment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment"\x98\x01\n\tFaceFrame\x12[\n\x19normalized_bounding_boxes\x18\x01 \x03(\x0b\x32\x38.google.cloud.videointelligence.v1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\xa3\x01\n\x0e\x46\x61\x63\x65\x41nnotation\x12\x11\n\tthumbnail\x18\x01 \x01(\x0c\x12@\n\x08segments\x18\x02 \x03(\x0b\x32..google.cloud.videointelligence.v1.FaceSegment\x12<\n\x06\x66rames\x18\x03 \x03(\x0b\x32,.google.cloud.videointelligence.v1.FaceFrame"\xbd\x08\n\x16VideoAnnotationResults\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12@\n\x07segment\x18\n \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12U\n\x19segment_label_annotations\x18\x02 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12^\n"segment_presence_label_annotations\x18\x17 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12R\n\x16shot_label_annotations\x18\x03 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12[\n\x1fshot_presence_label_annotations\x18\x18 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12S\n\x17\x66rame_label_annotations\x18\x04 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12K\n\x10\x66\x61\x63\x65_annotations\x18\x05 \x03(\x0b\x32\x31.google.cloud.videointelligence.v1.FaceAnnotation\x12I\n\x10shot_annotations\x18\x06 \x03(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12Y\n\x13\x65xplicit_annotation\x18\x07 \x01(\x0b\x32<.google.cloud.videointelligence.v1.ExplicitContentAnnotation\x12U\n\x15speech_transcriptions\x18\x0b \x03(\x0b\x32\x36.google.cloud.videointelligence.v1.SpeechTranscription\x12K\n\x10text_annotations\x18\x0c \x03(\x0b\x32\x31.google.cloud.videointelligence.v1.TextAnnotation\x12W\n\x12object_annotations\x18\x0e 
\x03(\x0b\x32;.google.cloud.videointelligence.v1.ObjectTrackingAnnotation\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status"n\n\x15\x41nnotateVideoResponse\x12U\n\x12\x61nnotation_results\x18\x01 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1.VideoAnnotationResults"\xa6\x02\n\x17VideoAnnotationProgress\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x18\n\x10progress_percent\x18\x02 \x01(\x05\x12.\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12;\n\x07\x66\x65\x61ture\x18\x05 \x01(\x0e\x32*.google.cloud.videointelligence.v1.Feature\x12@\n\x07segment\x18\x06 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment"p\n\x15\x41nnotateVideoProgress\x12W\n\x13\x61nnotation_progress\x18\x01 \x03(\x0b\x32:.google.cloud.videointelligence.v1.VideoAnnotationProgress"\xd4\x02\n\x19SpeechTranscriptionConfig\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x18\n\x10max_alternatives\x18\x02 \x01(\x05\x12\x18\n\x10\x66ilter_profanity\x18\x03 \x01(\x08\x12I\n\x0fspeech_contexts\x18\x04 \x03(\x0b\x32\x30.google.cloud.videointelligence.v1.SpeechContext\x12$\n\x1c\x65nable_automatic_punctuation\x18\x05 \x01(\x08\x12\x14\n\x0c\x61udio_tracks\x18\x06 \x03(\x05\x12"\n\x1a\x65nable_speaker_diarization\x18\x07 \x01(\x08\x12!\n\x19\x64iarization_speaker_count\x18\x08 \x01(\x05\x12\x1e\n\x16\x65nable_word_confidence\x18\t \x01(\x08" \n\rSpeechContext\x12\x0f\n\x07phrases\x18\x01 \x03(\t"\x83\x01\n\x13SpeechTranscription\x12U\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32?.google.cloud.videointelligence.v1.SpeechRecognitionAlternative\x12\x15\n\rlanguage_code\x18\x02 \x01(\t"\x82\x01\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12:\n\x05words\x18\x03 \x03(\x0b\x32+.google.cloud.videointelligence.v1.WordInfo"\x9d\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 \x01(\t\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x13\n\x0bspeaker_tag\x18\x05 \x01(\x05"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"_\n\x16NormalizedBoundingPoly\x12\x45\n\x08vertices\x18\x01 \x03(\x0b\x32\x33.google.cloud.videointelligence.v1.NormalizedVertex"\xa1\x01\n\x0bTextSegment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12<\n\x06\x66rames\x18\x03 \x03(\x0b\x32,.google.cloud.videointelligence.v1.TextFrame"\x94\x01\n\tTextFrame\x12W\n\x14rotated_bounding_box\x18\x01 \x01(\x0b\x32\x39.google.cloud.videointelligence.v1.NormalizedBoundingPoly\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"`\n\x0eTextAnnotation\x12\x0c\n\x04text\x18\x01 \x01(\t\x12@\n\x08segments\x18\x02 \x03(\x0b\x32..google.cloud.videointelligence.v1.TextSegment"\xa0\x01\n\x13ObjectTrackingFrame\x12Y\n\x17normalized_bounding_box\x18\x01 \x01(\x0b\x32\x38.google.cloud.videointelligence.v1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\x97\x02\n\x18ObjectTrackingAnnotation\x12\x42\n\x07segment\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegmentH\x00\x12\x12\n\x08track_id\x18\x05 \x01(\x03H\x00\x12\x39\n\x06\x65ntity\x18\x01 \x01(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x46\n\x06\x66rames\x18\x02 
\x03(\x0b\x32\x36.google.cloud.videointelligence.v1.ObjectTrackingFrameB\x0c\n\ntrack_info*\xc9\x01\n\x07\x46\x65\x61ture\x12\x17\n\x13\x46\x45\x41TURE_UNSPECIFIED\x10\x00\x12\x13\n\x0fLABEL_DETECTION\x10\x01\x12\x19\n\x15SHOT_CHANGE_DETECTION\x10\x02\x12\x1e\n\x1a\x45XPLICIT_CONTENT_DETECTION\x10\x03\x12\x12\n\x0e\x46\x41\x43\x45_DETECTION\x10\x04\x12\x18\n\x14SPEECH_TRANSCRIPTION\x10\x06\x12\x12\n\x0eTEXT_DETECTION\x10\x07\x12\x13\n\x0fOBJECT_TRACKING\x10\t*r\n\x12LabelDetectionMode\x12$\n LABEL_DETECTION_MODE_UNSPECIFIED\x10\x00\x12\r\n\tSHOT_MODE\x10\x01\x12\x0e\n\nFRAME_MODE\x10\x02\x12\x17\n\x13SHOT_AND_FRAME_MODE\x10\x03*t\n\nLikelihood\x12\x1a\n\x16LIKELIHOOD_UNSPECIFIED\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xfa\x01\n\x18VideoIntelligenceService\x12\x87\x01\n\rAnnotateVideo\x12\x37.google.cloud.videointelligence.v1.AnnotateVideoRequest\x1a\x1d.google.longrunning.Operation"\x1e\x82\xd3\xe4\x93\x02\x18"\x13/v1/videos:annotate:\x01*\x1aT\xca\x41 videointelligence.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8b\x02\n%com.google.cloud.videointelligence.v1B\x1dVideoIntelligenceServiceProtoP\x01ZRgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1;videointelligence\xaa\x02!Google.Cloud.VideoIntelligence.V1\xca\x02!Google\\Cloud\\VideoIntelligence\\V1\xea\x02$Google::Cloud::VideoIntelligence::V1b\x06proto3' + '\n@google/cloud/videointelligence_v1/proto/video_intelligence.proto\x12!google.cloud.videointelligence.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a#google/longrunning/operations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xfe\x01\n\x14\x41nnotateVideoRequest\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x15\n\rinput_content\x18\x06 
\x01(\x0c\x12\x41\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0e\x32*.google.cloud.videointelligence.v1.FeatureB\x03\xe0\x41\x02\x12\x46\n\rvideo_context\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoContext\x12\x17\n\noutput_uri\x18\x04 \x01(\tB\x03\xe0\x41\x01\x12\x18\n\x0blocation_id\x18\x05 \x01(\tB\x03\xe0\x41\x01"\xe6\x05\n\x0cVideoContext\x12\x41\n\x08segments\x18\x01 \x03(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12W\n\x16label_detection_config\x18\x02 \x01(\x0b\x32\x37.google.cloud.videointelligence.v1.LabelDetectionConfig\x12\x62\n\x1cshot_change_detection_config\x18\x03 \x01(\x0b\x32<.google.cloud.videointelligence.v1.ShotChangeDetectionConfig\x12l\n!explicit_content_detection_config\x18\x04 \x01(\x0b\x32\x41.google.cloud.videointelligence.v1.ExplicitContentDetectionConfig\x12U\n\x15\x66\x61\x63\x65_detection_config\x18\x05 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1.FaceDetectionConfig\x12\x61\n\x1bspeech_transcription_config\x18\x06 \x01(\x0b\x32<.google.cloud.videointelligence.v1.SpeechTranscriptionConfig\x12U\n\x15text_detection_config\x18\x08 \x01(\x0b\x32\x36.google.cloud.videointelligence.v1.TextDetectionConfig\x12W\n\x16object_tracking_config\x18\r \x01(\x0b\x32\x37.google.cloud.videointelligence.v1.ObjectTrackingConfig"\xdd\x01\n\x14LabelDetectionConfig\x12S\n\x14label_detection_mode\x18\x01 \x01(\x0e\x32\x35.google.cloud.videointelligence.v1.LabelDetectionMode\x12\x19\n\x11stationary_camera\x18\x02 \x01(\x08\x12\r\n\x05model\x18\x03 \x01(\t\x12"\n\x1a\x66rame_confidence_threshold\x18\x04 \x01(\x02\x12"\n\x1avideo_confidence_threshold\x18\x05 \x01(\x02"*\n\x19ShotChangeDetectionConfig\x12\r\n\x05model\x18\x01 \x01(\t"%\n\x14ObjectTrackingConfig\x12\r\n\x05model\x18\x01 \x01(\t"D\n\x13\x46\x61\x63\x65\x44\x65tectionConfig\x12\r\n\x05model\x18\x01 \x01(\t\x12\x1e\n\x16include_bounding_boxes\x18\x02 \x01(\x08"/\n\x1e\x45xplicitContentDetectionConfig\x12\r\n\x05model\x18\x01 
\x01(\t"<\n\x13TextDetectionConfig\x12\x16\n\x0elanguage_hints\x18\x01 \x03(\t\x12\r\n\x05model\x18\x02 \x01(\t"x\n\x0cVideoSegment\x12\x34\n\x11start_time_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0f\x65nd_time_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"d\n\x0cLabelSegment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02"P\n\nLabelFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x12\n\nconfidence\x18\x02 \x01(\x02"G\n\x06\x45ntity\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x15\n\rlanguage_code\x18\x03 \x01(\t"\x94\x02\n\x0fLabelAnnotation\x12\x39\n\x06\x65ntity\x18\x01 \x01(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x44\n\x11\x63\x61tegory_entities\x18\x02 \x03(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x41\n\x08segments\x18\x03 \x03(\x0b\x32/.google.cloud.videointelligence.v1.LabelSegment\x12=\n\x06\x66rames\x18\x04 \x03(\x0b\x32-.google.cloud.videointelligence.v1.LabelFrame"\x95\x01\n\x14\x45xplicitContentFrame\x12.\n\x0btime_offset\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12M\n\x16pornography_likelihood\x18\x02 \x01(\x0e\x32-.google.cloud.videointelligence.v1.Likelihood"d\n\x19\x45xplicitContentAnnotation\x12G\n\x06\x66rames\x18\x01 \x03(\x0b\x32\x37.google.cloud.videointelligence.v1.ExplicitContentFrame"Q\n\x15NormalizedBoundingBox\x12\x0c\n\x04left\x18\x01 \x01(\x02\x12\x0b\n\x03top\x18\x02 \x01(\x02\x12\r\n\x05right\x18\x03 \x01(\x02\x12\x0e\n\x06\x62ottom\x18\x04 \x01(\x02"O\n\x0b\x46\x61\x63\x65Segment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment"\x98\x01\n\tFaceFrame\x12[\n\x19normalized_bounding_boxes\x18\x01 \x03(\x0b\x32\x38.google.cloud.videointelligence.v1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 
\x01(\x0b\x32\x19.google.protobuf.Duration"\xa3\x01\n\x0e\x46\x61\x63\x65\x41nnotation\x12\x11\n\tthumbnail\x18\x01 \x01(\x0c\x12@\n\x08segments\x18\x02 \x03(\x0b\x32..google.cloud.videointelligence.v1.FaceSegment\x12<\n\x06\x66rames\x18\x03 \x03(\x0b\x32,.google.cloud.videointelligence.v1.FaceFrame"\xbd\x08\n\x16VideoAnnotationResults\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12@\n\x07segment\x18\n \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12U\n\x19segment_label_annotations\x18\x02 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12^\n"segment_presence_label_annotations\x18\x17 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12R\n\x16shot_label_annotations\x18\x03 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12[\n\x1fshot_presence_label_annotations\x18\x18 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12S\n\x17\x66rame_label_annotations\x18\x04 \x03(\x0b\x32\x32.google.cloud.videointelligence.v1.LabelAnnotation\x12K\n\x10\x66\x61\x63\x65_annotations\x18\x05 \x03(\x0b\x32\x31.google.cloud.videointelligence.v1.FaceAnnotation\x12I\n\x10shot_annotations\x18\x06 \x03(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12Y\n\x13\x65xplicit_annotation\x18\x07 \x01(\x0b\x32<.google.cloud.videointelligence.v1.ExplicitContentAnnotation\x12U\n\x15speech_transcriptions\x18\x0b \x03(\x0b\x32\x36.google.cloud.videointelligence.v1.SpeechTranscription\x12K\n\x10text_annotations\x18\x0c \x03(\x0b\x32\x31.google.cloud.videointelligence.v1.TextAnnotation\x12W\n\x12object_annotations\x18\x0e \x03(\x0b\x32;.google.cloud.videointelligence.v1.ObjectTrackingAnnotation\x12!\n\x05\x65rror\x18\t \x01(\x0b\x32\x12.google.rpc.Status"n\n\x15\x41nnotateVideoResponse\x12U\n\x12\x61nnotation_results\x18\x01 \x03(\x0b\x32\x39.google.cloud.videointelligence.v1.VideoAnnotationResults"\xa6\x02\n\x17VideoAnnotationProgress\x12\x11\n\tinput_uri\x18\x01 
\x01(\t\x12\x18\n\x10progress_percent\x18\x02 \x01(\x05\x12.\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12;\n\x07\x66\x65\x61ture\x18\x05 \x01(\x0e\x32*.google.cloud.videointelligence.v1.Feature\x12@\n\x07segment\x18\x06 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment"p\n\x15\x41nnotateVideoProgress\x12W\n\x13\x61nnotation_progress\x18\x01 \x03(\x0b\x32:.google.cloud.videointelligence.v1.VideoAnnotationProgress"\x81\x03\n\x19SpeechTranscriptionConfig\x12\x1a\n\rlanguage_code\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x1d\n\x10max_alternatives\x18\x02 \x01(\x05\x42\x03\xe0\x41\x01\x12\x1d\n\x10\x66ilter_profanity\x18\x03 \x01(\x08\x42\x03\xe0\x41\x01\x12N\n\x0fspeech_contexts\x18\x04 \x03(\x0b\x32\x30.google.cloud.videointelligence.v1.SpeechContextB\x03\xe0\x41\x01\x12)\n\x1c\x65nable_automatic_punctuation\x18\x05 \x01(\x08\x42\x03\xe0\x41\x01\x12\x19\n\x0c\x61udio_tracks\x18\x06 \x03(\x05\x42\x03\xe0\x41\x01\x12\'\n\x1a\x65nable_speaker_diarization\x18\x07 \x01(\x08\x42\x03\xe0\x41\x01\x12&\n\x19\x64iarization_speaker_count\x18\x08 \x01(\x05\x42\x03\xe0\x41\x01\x12#\n\x16\x65nable_word_confidence\x18\t \x01(\x08\x42\x03\xe0\x41\x01"%\n\rSpeechContext\x12\x14\n\x07phrases\x18\x01 \x03(\tB\x03\xe0\x41\x01"\x88\x01\n\x13SpeechTranscription\x12U\n\x0c\x61lternatives\x18\x01 \x03(\x0b\x32?.google.cloud.videointelligence.v1.SpeechRecognitionAlternative\x12\x1a\n\rlanguage_code\x18\x02 \x01(\tB\x03\xe0\x41\x03"\x8c\x01\n\x1cSpeechRecognitionAlternative\x12\x12\n\ntranscript\x18\x01 \x01(\t\x12\x17\n\nconfidence\x18\x02 \x01(\x02\x42\x03\xe0\x41\x03\x12?\n\x05words\x18\x03 \x03(\x0b\x32+.google.cloud.videointelligence.v1.WordInfoB\x03\xe0\x41\x03"\xa7\x01\n\x08WordInfo\x12-\n\nstart_time\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12+\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x0c\n\x04word\x18\x03 
\x01(\t\x12\x17\n\nconfidence\x18\x04 \x01(\x02\x42\x03\xe0\x41\x03\x12\x18\n\x0bspeaker_tag\x18\x05 \x01(\x05\x42\x03\xe0\x41\x03"(\n\x10NormalizedVertex\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02"_\n\x16NormalizedBoundingPoly\x12\x45\n\x08vertices\x18\x01 \x03(\x0b\x32\x33.google.cloud.videointelligence.v1.NormalizedVertex"\xa1\x01\n\x0bTextSegment\x12@\n\x07segment\x18\x01 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12<\n\x06\x66rames\x18\x03 \x03(\x0b\x32,.google.cloud.videointelligence.v1.TextFrame"\x94\x01\n\tTextFrame\x12W\n\x14rotated_bounding_box\x18\x01 \x01(\x0b\x32\x39.google.cloud.videointelligence.v1.NormalizedBoundingPoly\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"`\n\x0eTextAnnotation\x12\x0c\n\x04text\x18\x01 \x01(\t\x12@\n\x08segments\x18\x02 \x03(\x0b\x32..google.cloud.videointelligence.v1.TextSegment"\xa0\x01\n\x13ObjectTrackingFrame\x12Y\n\x17normalized_bounding_box\x18\x01 \x01(\x0b\x32\x38.google.cloud.videointelligence.v1.NormalizedBoundingBox\x12.\n\x0btime_offset\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"\x97\x02\n\x18ObjectTrackingAnnotation\x12\x42\n\x07segment\x18\x03 \x01(\x0b\x32/.google.cloud.videointelligence.v1.VideoSegmentH\x00\x12\x12\n\x08track_id\x18\x05 \x01(\x03H\x00\x12\x39\n\x06\x65ntity\x18\x01 \x01(\x0b\x32).google.cloud.videointelligence.v1.Entity\x12\x12\n\nconfidence\x18\x04 \x01(\x02\x12\x46\n\x06\x66rames\x18\x02 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1.ObjectTrackingFrameB\x0c\n\ntrack_info*\xc9\x01\n\x07\x46\x65\x61ture\x12\x17\n\x13\x46\x45\x41TURE_UNSPECIFIED\x10\x00\x12\x13\n\x0fLABEL_DETECTION\x10\x01\x12\x19\n\x15SHOT_CHANGE_DETECTION\x10\x02\x12\x1e\n\x1a\x45XPLICIT_CONTENT_DETECTION\x10\x03\x12\x12\n\x0e\x46\x41\x43\x45_DETECTION\x10\x04\x12\x18\n\x14SPEECH_TRANSCRIPTION\x10\x06\x12\x12\n\x0eTEXT_DETECTION\x10\x07\x12\x13\n\x0fOBJECT_TRACKING\x10\t*r\n\x12LabelDetectionMode\x12$\n 
LABEL_DETECTION_MODE_UNSPECIFIED\x10\x00\x12\r\n\tSHOT_MODE\x10\x01\x12\x0e\n\nFRAME_MODE\x10\x02\x12\x17\n\x13SHOT_AND_FRAME_MODE\x10\x03*t\n\nLikelihood\x12\x1a\n\x16LIKELIHOOD_UNSPECIFIED\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xc0\x02\n\x18VideoIntelligenceService\x12\xcd\x01\n\rAnnotateVideo\x12\x37.google.cloud.videointelligence.v1.AnnotateVideoRequest\x1a\x1d.google.longrunning.Operation"d\x82\xd3\xe4\x93\x02\x18"\x13/v1/videos:annotate:\x01*\xda\x41\x12input_uri,features\xca\x41.\n\x15\x41nnotateVideoResponse\x12\x15\x41nnotateVideoProgress\x1aT\xca\x41 videointelligence.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x8b\x02\n%com.google.cloud.videointelligence.v1B\x1dVideoIntelligenceServiceProtoP\x01ZRgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1;videointelligence\xaa\x02!Google.Cloud.VideoIntelligence.V1\xca\x02!Google\\Cloud\\VideoIntelligence\\V1\xea\x02$Google::Cloud::VideoIntelligence::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -103,8 +105,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6570, - serialized_end=6771, + serialized_start=6693, + serialized_end=6894, ) _sym_db.RegisterEnumDescriptor(_FEATURE) @@ -138,8 +140,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6773, - serialized_end=6887, + serialized_start=6896, + serialized_end=7010, ) _sym_db.RegisterEnumDescriptor(_LABELDETECTIONMODE) @@ -175,8 +177,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=6889, - serialized_end=7005, + serialized_start=7012, + 
serialized_end=7128, ) _sym_db.RegisterEnumDescriptor(_LIKELIHOOD) @@ -259,7 +261,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -295,7 +297,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -313,7 +315,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -325,8 +327,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=286, - serialized_end=525, + serialized_start=319, + serialized_end=573, ) @@ -490,8 +492,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=528, - serialized_end=1270, + serialized_start=576, + serialized_end=1318, ) @@ -601,8 +603,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1273, - serialized_end=1494, + serialized_start=1321, + serialized_end=1542, ) @@ -640,21 +642,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1496, - serialized_end=1538, + serialized_start=1544, + serialized_end=1586, ) -_EXPLICITCONTENTDETECTIONCONFIG = _descriptor.Descriptor( - name="ExplicitContentDetectionConfig", - full_name="google.cloud.videointelligence.v1.ExplicitContentDetectionConfig", +_OBJECTTRACKINGCONFIG = _descriptor.Descriptor( + name="ObjectTrackingConfig", + full_name="google.cloud.videointelligence.v1.ObjectTrackingConfig", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="model", - full_name="google.cloud.videointelligence.v1.ExplicitContentDetectionConfig.model", + full_name="google.cloud.videointelligence.v1.ObjectTrackingConfig.model", index=0, number=1, type=9, @@ -679,8 +681,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=1540, - serialized_end=1587, + serialized_start=1588, + serialized_end=1625, ) @@ -736,21 +738,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1589, - serialized_end=1657, + serialized_start=1627, + serialized_end=1695, ) -_OBJECTTRACKINGCONFIG = _descriptor.Descriptor( - name="ObjectTrackingConfig", - full_name="google.cloud.videointelligence.v1.ObjectTrackingConfig", +_EXPLICITCONTENTDETECTIONCONFIG = _descriptor.Descriptor( + name="ExplicitContentDetectionConfig", + full_name="google.cloud.videointelligence.v1.ExplicitContentDetectionConfig", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name="model", - full_name="google.cloud.videointelligence.v1.ObjectTrackingConfig.model", + full_name="google.cloud.videointelligence.v1.ExplicitContentDetectionConfig.model", index=0, number=1, type=9, @@ -775,8 +777,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1659, - serialized_end=1696, + serialized_start=1697, + serialized_end=1744, ) @@ -832,8 +834,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1698, - serialized_end=1758, + serialized_start=1746, + serialized_end=1806, ) @@ -889,8 +891,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1760, - serialized_end=1880, + serialized_start=1808, + serialized_end=1928, ) @@ -946,8 +948,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1882, - serialized_end=1982, + serialized_start=1930, + serialized_end=2030, ) @@ -1003,8 +1005,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1984, - serialized_end=2064, + serialized_start=2032, + serialized_end=2112, ) @@ -1078,8 +1080,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2066, - serialized_end=2137, + serialized_start=2114, + serialized_end=2185, ) @@ -1171,8 +1173,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2140, - 
serialized_end=2416, + serialized_start=2188, + serialized_end=2464, ) @@ -1228,8 +1230,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2419, - serialized_end=2568, + serialized_start=2467, + serialized_end=2616, ) @@ -1267,8 +1269,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2570, - serialized_end=2670, + serialized_start=2618, + serialized_end=2718, ) @@ -1360,8 +1362,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2672, - serialized_end=2753, + serialized_start=2720, + serialized_end=2801, ) @@ -1399,8 +1401,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2755, - serialized_end=2834, + serialized_start=2803, + serialized_end=2882, ) @@ -1456,8 +1458,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2837, - serialized_end=2989, + serialized_start=2885, + serialized_end=3037, ) @@ -1531,8 +1533,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2992, - serialized_end=3155, + serialized_start=3040, + serialized_end=3203, ) @@ -1804,8 +1806,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3158, - serialized_end=4243, + serialized_start=3206, + serialized_end=4291, ) @@ -1843,8 +1845,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4245, - serialized_end=4355, + serialized_start=4293, + serialized_end=4403, ) @@ -1972,8 +1974,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4358, - serialized_end=4652, + serialized_start=4406, + serialized_end=4700, ) @@ -2011,8 +2013,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4654, - serialized_end=4766, + serialized_start=4702, + serialized_end=4814, ) @@ -2038,7 +2040,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2056,7 +2058,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2074,7 +2076,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2092,7 +2094,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2110,7 +2112,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2128,7 +2130,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2146,7 +2148,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2164,7 +2166,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2182,7 +2184,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ), ], @@ -2194,8 +2196,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4769, - serialized_end=5109, + serialized_start=4817, + serialized_end=5202, ) @@ -2221,7 +2223,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\001"), file=DESCRIPTOR, ) ], @@ -2233,8 +2235,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5111, - 
serialized_end=5143, + serialized_start=5204, + serialized_end=5241, ) @@ -2278,7 +2280,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2290,8 +2292,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5146, - serialized_end=5277, + serialized_start=5244, + serialized_end=5380, ) @@ -2335,7 +2337,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2353,7 +2355,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2365,8 +2367,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5280, - serialized_end=5410, + serialized_start=5383, + serialized_end=5523, ) @@ -2446,7 +2448,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2464,7 +2466,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\003"), file=DESCRIPTOR, ), ], @@ -2476,8 +2478,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5413, - serialized_end=5570, + serialized_start=5526, + serialized_end=5693, ) @@ -2533,8 +2535,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5572, - serialized_end=5612, + serialized_start=5695, + serialized_end=5735, ) @@ -2572,8 +2574,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5614, - serialized_end=5709, + serialized_start=5737, + serialized_end=5832, ) @@ -2647,8 +2649,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5712, - serialized_end=5873, + serialized_start=5835, + 
serialized_end=5996, ) @@ -2704,8 +2706,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5876, - serialized_end=6024, + serialized_start=5999, + serialized_end=6147, ) @@ -2761,8 +2763,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6026, - serialized_end=6122, + serialized_start=6149, + serialized_end=6245, ) @@ -2818,8 +2820,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6125, - serialized_end=6285, + serialized_start=6248, + serialized_end=6408, ) @@ -2937,8 +2939,8 @@ fields=[], ) ], - serialized_start=6288, - serialized_end=6567, + serialized_start=6411, + serialized_end=6690, ) _ANNOTATEVIDEOREQUEST.fields_by_name["features"].enum_type = _FEATURE @@ -3093,11 +3095,11 @@ DESCRIPTOR.message_types_by_name[ "ShotChangeDetectionConfig" ] = _SHOTCHANGEDETECTIONCONFIG +DESCRIPTOR.message_types_by_name["ObjectTrackingConfig"] = _OBJECTTRACKINGCONFIG +DESCRIPTOR.message_types_by_name["FaceDetectionConfig"] = _FACEDETECTIONCONFIG DESCRIPTOR.message_types_by_name[ "ExplicitContentDetectionConfig" ] = _EXPLICITCONTENTDETECTIONCONFIG -DESCRIPTOR.message_types_by_name["FaceDetectionConfig"] = _FACEDETECTIONCONFIG -DESCRIPTOR.message_types_by_name["ObjectTrackingConfig"] = _OBJECTTRACKINGCONFIG DESCRIPTOR.message_types_by_name["TextDetectionConfig"] = _TEXTDETECTIONCONFIG DESCRIPTOR.message_types_by_name["VideoSegment"] = _VIDEOSEGMENT DESCRIPTOR.message_types_by_name["LabelSegment"] = _LABELSEGMENT @@ -3165,11 +3167,11 @@ specified via ``input_uri``. If set, ``input_uri`` should be unset. features: - Requested video annotation features. + Required. Requested video annotation features. video_context: Additional video context and/or feature-specific parameters. output_uri: - Optional location where the output (in JSON format) should be + Optional. Location where the output (in JSON format) should be stored. 
Currently, only `Google Cloud Storage `__ URIs are supported, which must be specified in the following format: @@ -3178,7 +3180,7 @@ ]). For more information, see `Request URIs `__. location_id: - Optional cloud region where annotation should take place. + Optional. Cloud region where annotation should take place. Supported cloud regions: ``us-east1``, ``us-west1``, ``europe- west1``, ``asia-east1``. If no region is specified, a region will be determined based on video file location. @@ -3285,24 +3287,24 @@ ) _sym_db.RegisterMessage(ShotChangeDetectionConfig) -ExplicitContentDetectionConfig = _reflection.GeneratedProtocolMessageType( - "ExplicitContentDetectionConfig", +ObjectTrackingConfig = _reflection.GeneratedProtocolMessageType( + "ObjectTrackingConfig", (_message.Message,), dict( - DESCRIPTOR=_EXPLICITCONTENTDETECTIONCONFIG, + DESCRIPTOR=_OBJECTTRACKINGCONFIG, __module__="google.cloud.videointelligence_v1.proto.video_intelligence_pb2", - __doc__="""Config for EXPLICIT\_CONTENT\_DETECTION. + __doc__="""Config for OBJECT\_TRACKING. Attributes: model: - Model to use for explicit content detection. Supported values: + Model to use for object tracking. Supported values: "builtin/stable" (the default if unset) and "builtin/latest". 
""", - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ExplicitContentDetectionConfig) + # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ObjectTrackingConfig) ), ) -_sym_db.RegisterMessage(ExplicitContentDetectionConfig) +_sym_db.RegisterMessage(ObjectTrackingConfig) FaceDetectionConfig = _reflection.GeneratedProtocolMessageType( "FaceDetectionConfig", @@ -3326,24 +3328,24 @@ ) _sym_db.RegisterMessage(FaceDetectionConfig) -ObjectTrackingConfig = _reflection.GeneratedProtocolMessageType( - "ObjectTrackingConfig", +ExplicitContentDetectionConfig = _reflection.GeneratedProtocolMessageType( + "ExplicitContentDetectionConfig", (_message.Message,), dict( - DESCRIPTOR=_OBJECTTRACKINGCONFIG, + DESCRIPTOR=_EXPLICITCONTENTDETECTIONCONFIG, __module__="google.cloud.videointelligence_v1.proto.video_intelligence_pb2", - __doc__="""Config for OBJECT\_TRACKING. + __doc__="""Config for EXPLICIT\_CONTENT\_DETECTION. Attributes: model: - Model to use for object tracking. Supported values: + Model to use for explicit content detection. Supported values: "builtin/stable" (the default if unset) and "builtin/latest". """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ObjectTrackingConfig) + # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.ExplicitContentDetectionConfig) ), ) -_sym_db.RegisterMessage(ObjectTrackingConfig) +_sym_db.RegisterMessage(ExplicitContentDetectionConfig) TextDetectionConfig = _reflection.GeneratedProtocolMessageType( "TextDetectionConfig", @@ -3635,15 +3637,23 @@ segment_presence_label_annotations: Presence label annotations on video level or user specified segment level. There is exactly one element for each unique - label. This will eventually get publicly exposed and the - restriction will be removed. + label. 
Compared to the existing topical + ``segment_label_annotations``, this field presents more fine- + grained, segment-level labels detected in video content and is + made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. shot_label_annotations: Topical label annotations on shot level. There is exactly one element for each unique label. shot_presence_label_annotations: Presence label annotations on shot level. There is exactly one - element for each unique label. This will eventually get - publicly exposed and the restriction will be removed. + element for each unique label. Compared to the existing + topical ``shot_label_annotations``, this field presents more + fine-grained, shot-level labels detected in video content and + is made available only when the client sets + ``LabelDetectionConfig.model`` to "builtin/latest" in the + request. frame_label_annotations: Label annotations on frame level. There is exactly one element for each unique label. @@ -3757,13 +3767,13 @@ Attributes: language_code: - *Required* The language of the supplied audio as a `BCP-47 - `__ language - tag. Example: "en-US". See `Language Support + Required. *Required* The language of the supplied audio as a + `BCP-47 `__ + language tag. Example: "en-US". See `Language Support `__ for a list of the currently supported language codes. max_alternatives: - *Optional* Maximum number of recognition hypotheses to be + Optional. Maximum number of recognition hypotheses to be returned. Specifically, the maximum number of ``SpeechRecognitionAlternative`` messages within each ``SpeechTranscription``. The server may return fewer than @@ -3771,16 +3781,16 @@ of ``0`` or ``1`` will return a maximum of one. If omitted, will return a maximum of one. filter_profanity: - *Optional* If set to ``true``, the server will attempt to + Optional. 
If set to ``true``, the server will attempt to filter out profanities, replacing all but the initial character in each filtered word with asterisks, e.g. "f\*\*\*". If set to ``false`` or omitted, profanities won't be filtered out. speech_contexts: - *Optional* A means to provide context to assist the speech + Optional. A means to provide context to assist the speech recognition. enable_automatic_punctuation: - *Optional* If 'true', adds punctuation to recognition result + Optional. If 'true', adds punctuation to recognition result hypotheses. This feature is only available in select languages. Setting this for requests in other languages has no effect at all. The default 'false' value does not add @@ -3789,11 +3799,11 @@ users. In the future this may be exclusively available as a premium feature." audio_tracks: - *Optional* For file formats, such as MXF or MKV, supporting + Optional. For file formats, such as MXF or MKV, supporting multiple audio tracks, specify up to two tracks. Default: track 0. enable_speaker_diarization: - *Optional* If 'true', enables speaker detection for each + Optional. If 'true', enables speaker detection for each recognized word in the top alternative of the recognition result using a speaker\_tag provided in the WordInfo. Note: When this is true, we send all the words from the beginning of @@ -3802,13 +3812,13 @@ as our models learn to identify the speakers in the conversation over time. diarization_speaker_count: - *Optional* If set, specifies the estimated number of speakers + Optional. If set, specifies the estimated number of speakers in the conversation. If not set, defaults to '2'. Ignored unless enable\_speaker\_diarization is set to true. enable_word_confidence: - *Optional* If ``true``, the top result includes a list of - words and the confidence for those words. If ``false``, no - word-level confidence information is returned. The default is + Optional. 
If ``true``, the top result includes a list of words + and the confidence for those words. If ``false``, no word- + level confidence information is returned. The default is ``false``. """, # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1.SpeechTranscriptionConfig) @@ -3828,7 +3838,7 @@ Attributes: phrases: - *Optional* A list of strings containing words and phrases + Optional. A list of strings containing words and phrases "hints" so that the speech recognition is more likely to recognize them. This can be used to improve the accuracy for specific words and phrases, for example, if specific commands @@ -4124,6 +4134,26 @@ DESCRIPTOR._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["features"]._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["output_uri"]._options = None +_ANNOTATEVIDEOREQUEST.fields_by_name["location_id"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["language_code"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["max_alternatives"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["filter_profanity"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["speech_contexts"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name[ + "enable_automatic_punctuation" +]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["audio_tracks"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["enable_speaker_diarization"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["diarization_speaker_count"]._options = None +_SPEECHTRANSCRIPTIONCONFIG.fields_by_name["enable_word_confidence"]._options = None +_SPEECHCONTEXT.fields_by_name["phrases"]._options = None +_SPEECHTRANSCRIPTION.fields_by_name["language_code"]._options = None +_SPEECHRECOGNITIONALTERNATIVE.fields_by_name["confidence"]._options = None +_SPEECHRECOGNITIONALTERNATIVE.fields_by_name["words"]._options = None +_WORDINFO.fields_by_name["confidence"]._options = None 
+_WORDINFO.fields_by_name["speaker_tag"]._options = None _VIDEOINTELLIGENCESERVICE = _descriptor.ServiceDescriptor( name="VideoIntelligenceService", @@ -4133,8 +4163,8 @@ serialized_options=_b( "\312A videointelligence.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" ), - serialized_start=7008, - serialized_end=7258, + serialized_start=7131, + serialized_end=7451, methods=[ _descriptor.MethodDescriptor( name="AnnotateVideo", @@ -4144,7 +4174,7 @@ input_type=_ANNOTATEVIDEOREQUEST, output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, serialized_options=_b( - '\202\323\344\223\002\030"\023/v1/videos:annotate:\001*' + '\202\323\344\223\002\030"\023/v1/videos:annotate:\001*\332A\022input_uri,features\312A.\n\025AnnotateVideoResponse\022\025AnnotateVideoProgress' ), ) ], diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/enums.py b/videointelligence/google/cloud/videointelligence_v1beta1/gapic/enums.py deleted file mode 100644 index 7a13aca21272..000000000000 --- a/videointelligence/google/cloud/videointelligence_v1beta1/gapic/enums.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class Feature(enum.IntEnum): - """ - Video annotation feature. - - Attributes: - FEATURE_UNSPECIFIED (int): Unspecified. - LABEL_DETECTION (int): Label detection. 
Detect objects, such as dog or flower. - FACE_DETECTION (int): Human face detection and tracking. - SHOT_CHANGE_DETECTION (int): Shot change detection. - SAFE_SEARCH_DETECTION (int): Safe search detection. - """ - - FEATURE_UNSPECIFIED = 0 - LABEL_DETECTION = 1 - FACE_DETECTION = 2 - SHOT_CHANGE_DETECTION = 3 - SAFE_SEARCH_DETECTION = 4 - - -class LabelDetectionMode(enum.IntEnum): - """ - Label detection mode. - - Attributes: - LABEL_DETECTION_MODE_UNSPECIFIED (int): Unspecified. - SHOT_MODE (int): Detect shot-level labels. - FRAME_MODE (int): Detect frame-level labels. - SHOT_AND_FRAME_MODE (int): Detect both shot-level and frame-level labels. - """ - - LABEL_DETECTION_MODE_UNSPECIFIED = 0 - SHOT_MODE = 1 - FRAME_MODE = 2 - SHOT_AND_FRAME_MODE = 3 - - -class LabelLevel(enum.IntEnum): - """ - Label level (scope). - - Attributes: - LABEL_LEVEL_UNSPECIFIED (int): Unspecified. - VIDEO_LEVEL (int): Video-level. Corresponds to the whole video. - SEGMENT_LEVEL (int): Segment-level. Corresponds to one of ``AnnotateSpec.segments``. - SHOT_LEVEL (int): Shot-level. Corresponds to a single shot (i.e. a series of frames - without a major camera position or background change). - FRAME_LEVEL (int): Frame-level. Corresponds to a single video frame. - """ - - LABEL_LEVEL_UNSPECIFIED = 0 - VIDEO_LEVEL = 1 - SEGMENT_LEVEL = 2 - SHOT_LEVEL = 3 - FRAME_LEVEL = 4 - - -class Likelihood(enum.IntEnum): - """ - Bucketized representation of likelihood. - - Attributes: - UNKNOWN (int): Unknown likelihood. - VERY_UNLIKELY (int): Very unlikely. - UNLIKELY (int): Unlikely. - POSSIBLE (int): Possible. - LIKELY (int): Likely. - VERY_LIKELY (int): Very likely. 
- """ - - UNKNOWN = 0 - VERY_UNLIKELY = 1 - UNLIKELY = 2 - POSSIBLE = 3 - LIKELY = 4 - VERY_LIKELY = 5 diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto b/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto deleted file mode 100644 index 430776bf0031..000000000000 --- a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright 2017 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.videointelligence.v1beta1; - -import "google/api/annotations.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/timestamp.proto"; -import "google/rpc/status.proto"; - -option csharp_namespace = "Google.Cloud.VideoIntelligence.V1Beta1"; -option go_package = "google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1;videointelligence"; -option java_multiple_files = true; -option java_outer_classname = "VideoIntelligenceServiceProto"; -option java_package = "com.google.cloud.videointelligence.v1beta1"; -option php_namespace = "Google\\Cloud\\VideoIntelligence\\V1beta1"; -option ruby_package = "Google::Cloud::VideoIntelligence::V1beta1"; - -// Service that implements Google Cloud Video Intelligence API. -service VideoIntelligenceService { - // Performs asynchronous video annotation. 
Progress and results can be - // retrieved through the `google.longrunning.Operations` interface. - // `Operation.metadata` contains `AnnotateVideoProgress` (progress). - // `Operation.response` contains `AnnotateVideoResponse` (results). - rpc AnnotateVideo(AnnotateVideoRequest) - returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1beta1/videos:annotate" - body: "*" - }; - } -} - -// Video annotation request. -message AnnotateVideoRequest { - // Input video location. Currently, only - // [Google Cloud Storage](https://cloud.google.com/storage/) URIs are - // supported, which must be specified in the following format: - // `gs://bucket-id/object-id` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). For - // more information, see [Request URIs](/storage/docs/reference-uris). A video - // URI may include wildcards in `object-id`, and thus identify multiple - // videos. Supported wildcards: '*' to match 0 or more characters; - // '?' to match 1 character. If unset, the input video should be embedded - // in the request as `input_content`. If set, `input_content` should be unset. - string input_uri = 1; - - // The video data bytes. Encoding: base64. If unset, the input video(s) - // should be specified via `input_uri`. If set, `input_uri` should be unset. - string input_content = 6; - - // Requested video annotation features. - repeated Feature features = 2; - - // Additional video context and/or feature-specific parameters. - VideoContext video_context = 3; - - // Optional location where the output (in JSON format) should be stored. - // Currently, only [Google Cloud Storage](https://cloud.google.com/storage/) - // URIs are supported, which must be specified in the following format: - // `gs://bucket-id/object-id` (other URI formats return - // [google.rpc.Code.INVALID_ARGUMENT][google.rpc.Code.INVALID_ARGUMENT]). 
For - // more information, see [Request URIs](/storage/docs/reference-uris). - string output_uri = 4; - - // Optional cloud region where annotation should take place. Supported cloud - // regions: `us-east1`, `us-west1`, `europe-west1`, `asia-east1`. If no region - // is specified, a region will be determined based on video file location. - string location_id = 5; -} - -// Video context and/or feature-specific parameters. -message VideoContext { - // Video segments to annotate. The segments may overlap and are not required - // to be contiguous or span the whole video. If unspecified, each video - // is treated as a single segment. - repeated VideoSegment segments = 1; - - // If label detection has been requested, what labels should be detected - // in addition to video-level labels or segment-level labels. If unspecified, - // defaults to `SHOT_MODE`. - LabelDetectionMode label_detection_mode = 2; - - // Whether the video has been shot from a stationary (i.e. non-moving) camera. - // When set to true, might improve detection accuracy for moving objects. - bool stationary_camera = 3; - - // Model to use for label detection. - // Supported values: "latest" and "stable" (the default). - string label_detection_model = 4; - - // Model to use for face detection. - // Supported values: "latest" and "stable" (the default). - string face_detection_model = 5; - - // Model to use for shot change detection. - // Supported values: "latest" and "stable" (the default). - string shot_change_detection_model = 6; - - // Model to use for safe search detection. - // Supported values: "latest" and "stable" (the default). - string safe_search_detection_model = 7; -} - -// Video segment. -message VideoSegment { - // Start offset in microseconds (inclusive). Unset means 0. - int64 start_time_offset = 1; - - // End offset in microseconds (inclusive). Unset means 0. - int64 end_time_offset = 2; -} - -// Label location. -message LabelLocation { - // Video segment. 
Set to [-1, -1] for video-level labels. - // Set to [timestamp, timestamp] for frame-level labels. - // Otherwise, corresponds to one of `AnnotateSpec.segments` - // (if specified) or to shot boundaries (if requested). - VideoSegment segment = 1; - - // Confidence that the label is accurate. Range: [0, 1]. - float confidence = 2; - - // Label level. - LabelLevel level = 3; -} - -// Label annotation. -message LabelAnnotation { - // Textual description, e.g. `Fixed-gear bicycle`. - string description = 1; - - // Language code for `description` in BCP-47 format. - string language_code = 2; - - // Where the label was detected and with what confidence. - repeated LabelLocation locations = 3; -} - -// Safe search annotation (based on per-frame visual signals only). -// If no unsafe content has been detected in a frame, no annotations -// are present for that frame. If only some types of unsafe content -// have been detected in a frame, the likelihood is set to `UNKNOWN` -// for all other types of unsafe content. -message SafeSearchAnnotation { - // Likelihood of adult content. - Likelihood adult = 1; - - // Likelihood that an obvious modification was made to the original - // version to make it appear funny or offensive. - Likelihood spoof = 2; - - // Likelihood of medical content. - Likelihood medical = 3; - - // Likelihood of violent content. - Likelihood violent = 4; - - // Likelihood of racy content. - Likelihood racy = 5; - - // Video time offset in microseconds. - int64 time_offset = 6; -} - -// Bounding box. -message BoundingBox { - // Left X coordinate. - int32 left = 1; - - // Right X coordinate. - int32 right = 2; - - // Bottom Y coordinate. - int32 bottom = 3; - - // Top Y coordinate. - int32 top = 4; -} - -// Face location. -message FaceLocation { - // Bounding box in a frame. - BoundingBox bounding_box = 1; - - // Video time offset in microseconds. - int64 time_offset = 2; -} - -// Face annotation. 
-message FaceAnnotation { - // Thumbnail of a representative face view (in JPEG format). Encoding: base64. - string thumbnail = 1; - - // All locations where a face was detected. - // Faces are detected and tracked on a per-video basis - // (as opposed to across multiple videos). - repeated VideoSegment segments = 2; - - // Face locations at one frame per second. - repeated FaceLocation locations = 3; -} - -// Annotation results for a single video. -message VideoAnnotationResults { - // Video file location in - // [Google Cloud Storage](https://cloud.google.com/storage/). - string input_uri = 1; - - // Label annotations. There is exactly one element for each unique label. - repeated LabelAnnotation label_annotations = 2; - - // Face annotations. There is exactly one element for each unique face. - repeated FaceAnnotation face_annotations = 3; - - // Shot annotations. Each shot is represented as a video segment. - repeated VideoSegment shot_annotations = 4; - - // Safe search annotations. - repeated SafeSearchAnnotation safe_search_annotations = 6; - - // If set, indicates an error. Note that for a single `AnnotateVideoRequest` - // some videos may succeed and some may fail. - google.rpc.Status error = 5; -} - -// Video annotation response. Included in the `response` -// field of the `Operation` returned by the `GetOperation` -// call of the `google::longrunning::Operations` service. -message AnnotateVideoResponse { - // Annotation results for all videos specified in `AnnotateVideoRequest`. - repeated VideoAnnotationResults annotation_results = 1; -} - -// Annotation progress for a single video. -message VideoAnnotationProgress { - // Video file location in - // [Google Cloud Storage](https://cloud.google.com/storage/). - string input_uri = 1; - - // Approximate percentage processed thus far. - // Guaranteed to be 100 when fully processed. - int32 progress_percent = 2; - - // Time when the request was received. 
- google.protobuf.Timestamp start_time = 3; - - // Time of the most recent update. - google.protobuf.Timestamp update_time = 4; -} - -// Video annotation progress. Included in the `metadata` -// field of the `Operation` returned by the `GetOperation` -// call of the `google::longrunning::Operations` service. -message AnnotateVideoProgress { - // Progress metadata for all videos specified in `AnnotateVideoRequest`. - repeated VideoAnnotationProgress annotation_progress = 1; -} - -// Video annotation feature. -enum Feature { - // Unspecified. - FEATURE_UNSPECIFIED = 0; - - // Label detection. Detect objects, such as dog or flower. - LABEL_DETECTION = 1; - - // Human face detection and tracking. - FACE_DETECTION = 2; - - // Shot change detection. - SHOT_CHANGE_DETECTION = 3; - - // Safe search detection. - SAFE_SEARCH_DETECTION = 4; -} - -// Label level (scope). -enum LabelLevel { - // Unspecified. - LABEL_LEVEL_UNSPECIFIED = 0; - - // Video-level. Corresponds to the whole video. - VIDEO_LEVEL = 1; - - // Segment-level. Corresponds to one of `AnnotateSpec.segments`. - SEGMENT_LEVEL = 2; - - // Shot-level. Corresponds to a single shot (i.e. a series of frames - // without a major camera position or background change). - SHOT_LEVEL = 3; - - // Frame-level. Corresponds to a single video frame. - FRAME_LEVEL = 4; -} - -// Label detection mode. -enum LabelDetectionMode { - // Unspecified. - LABEL_DETECTION_MODE_UNSPECIFIED = 0; - - // Detect shot-level labels. - SHOT_MODE = 1; - - // Detect frame-level labels. - FRAME_MODE = 2; - - // Detect both shot-level and frame-level labels. - SHOT_AND_FRAME_MODE = 3; -} - -// Bucketized representation of likelihood. -enum Likelihood { - // Unknown likelihood. - UNKNOWN = 0; - - // Very unlikely. - VERY_UNLIKELY = 1; - - // Unlikely. - UNLIKELY = 2; - - // Possible. - POSSIBLE = 3; - - // Likely. - LIKELY = 4; - - // Very likely. 
- VERY_LIKELY = 5; -} diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2.py b/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2.py deleted file mode 100644 index df9be878c05e..000000000000 --- a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2.py +++ /dev/null @@ -1,1800 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto - -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/videointelligence_v1beta1/proto/video_intelligence.proto", - package="google.cloud.videointelligence.v1beta1", - syntax="proto3", - serialized_options=_b( - "\n*com.google.cloud.videointelligence.v1beta1B\035VideoIntelligenceServiceProtoP\001ZWgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1;videointelligence\252\002&Google.Cloud.VideoIntelligence.V1Beta1\312\002&Google\\Cloud\\VideoIntelligence\\V1beta1\352\002)Google::Cloud::VideoIntelligence::V1beta1" - ), - serialized_pb=_b( - 
'\nEgoogle/cloud/videointelligence_v1beta1/proto/video_intelligence.proto\x12&google.cloud.videointelligence.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto"\xf9\x01\n\x14\x41nnotateVideoRequest\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x15\n\rinput_content\x18\x06 \x01(\t\x12\x41\n\x08\x66\x65\x61tures\x18\x02 \x03(\x0e\x32/.google.cloud.videointelligence.v1beta1.Feature\x12K\n\rvideo_context\x18\x03 \x01(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoContext\x12\x12\n\noutput_uri\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"\xd2\x02\n\x0cVideoContext\x12\x46\n\x08segments\x18\x01 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoSegment\x12X\n\x14label_detection_mode\x18\x02 \x01(\x0e\x32:.google.cloud.videointelligence.v1beta1.LabelDetectionMode\x12\x19\n\x11stationary_camera\x18\x03 \x01(\x08\x12\x1d\n\x15label_detection_model\x18\x04 \x01(\t\x12\x1c\n\x14\x66\x61\x63\x65_detection_model\x18\x05 \x01(\t\x12#\n\x1bshot_change_detection_model\x18\x06 \x01(\t\x12#\n\x1bsafe_search_detection_model\x18\x07 \x01(\t"B\n\x0cVideoSegment\x12\x19\n\x11start_time_offset\x18\x01 \x01(\x03\x12\x17\n\x0f\x65nd_time_offset\x18\x02 \x01(\x03"\xad\x01\n\rLabelLocation\x12\x45\n\x07segment\x18\x01 \x01(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoSegment\x12\x12\n\nconfidence\x18\x02 \x01(\x02\x12\x41\n\x05level\x18\x03 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.LabelLevel"\x87\x01\n\x0fLabelAnnotation\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x15\n\rlanguage_code\x18\x02 \x01(\t\x12H\n\tlocations\x18\x03 \x03(\x0b\x32\x35.google.cloud.videointelligence.v1beta1.LabelLocation"\xfd\x02\n\x14SafeSearchAnnotation\x12\x41\n\x05\x61\x64ult\x18\x01 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12\x41\n\x05spoof\x18\x02 
\x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12\x43\n\x07medical\x18\x03 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12\x43\n\x07violent\x18\x04 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12@\n\x04racy\x18\x05 \x01(\x0e\x32\x32.google.cloud.videointelligence.v1beta1.Likelihood\x12\x13\n\x0btime_offset\x18\x06 \x01(\x03"G\n\x0b\x42oundingBox\x12\x0c\n\x04left\x18\x01 \x01(\x05\x12\r\n\x05right\x18\x02 \x01(\x05\x12\x0e\n\x06\x62ottom\x18\x03 \x01(\x05\x12\x0b\n\x03top\x18\x04 \x01(\x05"n\n\x0c\x46\x61\x63\x65Location\x12I\n\x0c\x62ounding_box\x18\x01 \x01(\x0b\x32\x33.google.cloud.videointelligence.v1beta1.BoundingBox\x12\x13\n\x0btime_offset\x18\x02 \x01(\x03"\xb4\x01\n\x0e\x46\x61\x63\x65\x41nnotation\x12\x11\n\tthumbnail\x18\x01 \x01(\t\x12\x46\n\x08segments\x18\x02 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoSegment\x12G\n\tlocations\x18\x03 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.FaceLocation"\xa3\x03\n\x16VideoAnnotationResults\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12R\n\x11label_annotations\x18\x02 \x03(\x0b\x32\x37.google.cloud.videointelligence.v1beta1.LabelAnnotation\x12P\n\x10\x66\x61\x63\x65_annotations\x18\x03 \x03(\x0b\x32\x36.google.cloud.videointelligence.v1beta1.FaceAnnotation\x12N\n\x10shot_annotations\x18\x04 \x03(\x0b\x32\x34.google.cloud.videointelligence.v1beta1.VideoSegment\x12]\n\x17safe_search_annotations\x18\x06 \x03(\x0b\x32<.google.cloud.videointelligence.v1beta1.SafeSearchAnnotation\x12!\n\x05\x65rror\x18\x05 \x01(\x0b\x32\x12.google.rpc.Status"s\n\x15\x41nnotateVideoResponse\x12Z\n\x12\x61nnotation_results\x18\x01 \x03(\x0b\x32>.google.cloud.videointelligence.v1beta1.VideoAnnotationResults"\xa7\x01\n\x17VideoAnnotationProgress\x12\x11\n\tinput_uri\x18\x01 \x01(\t\x12\x18\n\x10progress_percent\x18\x02 \x01(\x05\x12.\n\nstart_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0bupdate_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp"u\n\x15\x41nnotateVideoProgress\x12\\\n\x13\x61nnotation_progress\x18\x01 \x03(\x0b\x32?.google.cloud.videointelligence.v1beta1.VideoAnnotationProgress*\x81\x01\n\x07\x46\x65\x61ture\x12\x17\n\x13\x46\x45\x41TURE_UNSPECIFIED\x10\x00\x12\x13\n\x0fLABEL_DETECTION\x10\x01\x12\x12\n\x0e\x46\x41\x43\x45_DETECTION\x10\x02\x12\x19\n\x15SHOT_CHANGE_DETECTION\x10\x03\x12\x19\n\x15SAFE_SEARCH_DETECTION\x10\x04*n\n\nLabelLevel\x12\x1b\n\x17LABEL_LEVEL_UNSPECIFIED\x10\x00\x12\x0f\n\x0bVIDEO_LEVEL\x10\x01\x12\x11\n\rSEGMENT_LEVEL\x10\x02\x12\x0e\n\nSHOT_LEVEL\x10\x03\x12\x0f\n\x0b\x46RAME_LEVEL\x10\x04*r\n\x12LabelDetectionMode\x12$\n LABEL_DETECTION_MODE_UNSPECIFIED\x10\x00\x12\r\n\tSHOT_MODE\x10\x01\x12\x0e\n\nFRAME_MODE\x10\x02\x12\x17\n\x13SHOT_AND_FRAME_MODE\x10\x03*e\n\nLikelihood\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05\x32\xae\x01\n\x18VideoIntelligenceService\x12\x91\x01\n\rAnnotateVideo\x12<.google.cloud.videointelligence.v1beta1.AnnotateVideoRequest\x1a\x1d.google.longrunning.Operation"#\x82\xd3\xe4\x93\x02\x1d"\x18/v1beta1/videos:annotate:\x01*B\xa4\x02\n*com.google.cloud.videointelligence.v1beta1B\x1dVideoIntelligenceServiceProtoP\x01ZWgoogle.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1;videointelligence\xaa\x02&Google.Cloud.VideoIntelligence.V1Beta1\xca\x02&Google\\Cloud\\VideoIntelligence\\V1beta1\xea\x02)Google::Cloud::VideoIntelligence::V1beta1b\x06proto3' - ), - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_longrunning_dot_operations__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - ], -) - -_FEATURE = _descriptor.EnumDescriptor( - name="Feature", - full_name="google.cloud.videointelligence.v1beta1.Feature", - filename=None, - file=DESCRIPTOR, - values=[ - 
_descriptor.EnumValueDescriptor( - name="FEATURE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="LABEL_DETECTION", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="FACE_DETECTION", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SHOT_CHANGE_DETECTION", - index=3, - number=3, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SAFE_SEARCH_DETECTION", - index=4, - number=4, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2794, - serialized_end=2923, -) -_sym_db.RegisterEnumDescriptor(_FEATURE) - -Feature = enum_type_wrapper.EnumTypeWrapper(_FEATURE) -_LABELLEVEL = _descriptor.EnumDescriptor( - name="LabelLevel", - full_name="google.cloud.videointelligence.v1beta1.LabelLevel", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LABEL_LEVEL_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="VIDEO_LEVEL", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SEGMENT_LEVEL", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SHOT_LEVEL", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FRAME_LEVEL", index=4, number=4, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2925, - serialized_end=3035, -) -_sym_db.RegisterEnumDescriptor(_LABELLEVEL) - -LabelLevel = enum_type_wrapper.EnumTypeWrapper(_LABELLEVEL) -_LABELDETECTIONMODE = _descriptor.EnumDescriptor( - name="LabelDetectionMode", - 
full_name="google.cloud.videointelligence.v1beta1.LabelDetectionMode", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LABEL_DETECTION_MODE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SHOT_MODE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FRAME_MODE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="SHOT_AND_FRAME_MODE", - index=3, - number=3, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3037, - serialized_end=3151, -) -_sym_db.RegisterEnumDescriptor(_LABELDETECTIONMODE) - -LabelDetectionMode = enum_type_wrapper.EnumTypeWrapper(_LABELDETECTIONMODE) -_LIKELIHOOD = _descriptor.EnumDescriptor( - name="Likelihood", - full_name="google.cloud.videointelligence.v1beta1.Likelihood", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="UNKNOWN", index=0, number=0, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="VERY_UNLIKELY", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UNLIKELY", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="POSSIBLE", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="LIKELY", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="VERY_LIKELY", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=3153, - serialized_end=3254, -) -_sym_db.RegisterEnumDescriptor(_LIKELIHOOD) - -Likelihood = enum_type_wrapper.EnumTypeWrapper(_LIKELIHOOD) -FEATURE_UNSPECIFIED = 0 -LABEL_DETECTION = 1 
-FACE_DETECTION = 2 -SHOT_CHANGE_DETECTION = 3 -SAFE_SEARCH_DETECTION = 4 -LABEL_LEVEL_UNSPECIFIED = 0 -VIDEO_LEVEL = 1 -SEGMENT_LEVEL = 2 -SHOT_LEVEL = 3 -FRAME_LEVEL = 4 -LABEL_DETECTION_MODE_UNSPECIFIED = 0 -SHOT_MODE = 1 -FRAME_MODE = 2 -SHOT_AND_FRAME_MODE = 3 -UNKNOWN = 0 -VERY_UNLIKELY = 1 -UNLIKELY = 2 -POSSIBLE = 3 -LIKELY = 4 -VERY_LIKELY = 5 - - -_ANNOTATEVIDEOREQUEST = _descriptor.Descriptor( - name="AnnotateVideoRequest", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="input_uri", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.input_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="input_content", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.input_content", - index=1, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="features", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.features", - index=2, - number=2, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="video_context", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.video_context", - index=3, - 
number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="output_uri", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.output_uri", - index=4, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location_id", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoRequest.location_id", - index=5, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=239, - serialized_end=488, -) - - -_VIDEOCONTEXT = _descriptor.Descriptor( - name="VideoContext", - full_name="google.cloud.videointelligence.v1beta1.VideoContext", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="segments", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.segments", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_detection_mode", - 
full_name="google.cloud.videointelligence.v1beta1.VideoContext.label_detection_mode", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="stationary_camera", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.stationary_camera", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_detection_model", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.label_detection_model", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="face_detection_model", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.face_detection_model", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="shot_change_detection_model", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.shot_change_detection_model", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="safe_search_detection_model", - full_name="google.cloud.videointelligence.v1beta1.VideoContext.safe_search_detection_model", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=491, - serialized_end=829, -) - - -_VIDEOSEGMENT = _descriptor.Descriptor( - name="VideoSegment", - full_name="google.cloud.videointelligence.v1beta1.VideoSegment", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="start_time_offset", - full_name="google.cloud.videointelligence.v1beta1.VideoSegment.start_time_offset", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time_offset", - full_name="google.cloud.videointelligence.v1beta1.VideoSegment.end_time_offset", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=831, - serialized_end=897, -) - - 
-_LABELLOCATION = _descriptor.Descriptor( - name="LabelLocation", - full_name="google.cloud.videointelligence.v1beta1.LabelLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="segment", - full_name="google.cloud.videointelligence.v1beta1.LabelLocation.segment", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="confidence", - full_name="google.cloud.videointelligence.v1beta1.LabelLocation.confidence", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="level", - full_name="google.cloud.videointelligence.v1beta1.LabelLocation.level", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=900, - serialized_end=1073, -) - - -_LABELANNOTATION = _descriptor.Descriptor( - name="LabelAnnotation", - full_name="google.cloud.videointelligence.v1beta1.LabelAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="description", - full_name="google.cloud.videointelligence.v1beta1.LabelAnnotation.description", - index=0, - number=1, - type=9, - cpp_type=9, - 
label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="language_code", - full_name="google.cloud.videointelligence.v1beta1.LabelAnnotation.language_code", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="locations", - full_name="google.cloud.videointelligence.v1beta1.LabelAnnotation.locations", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1076, - serialized_end=1211, -) - - -_SAFESEARCHANNOTATION = _descriptor.Descriptor( - name="SafeSearchAnnotation", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="adult", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.adult", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="spoof", - 
full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.spoof", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="medical", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.medical", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="violent", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.violent", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="racy", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.racy", - index=4, - number=5, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_offset", - full_name="google.cloud.videointelligence.v1beta1.SafeSearchAnnotation.time_offset", - index=5, - number=6, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1214, - serialized_end=1595, -) - - -_BOUNDINGBOX = _descriptor.Descriptor( - name="BoundingBox", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="left", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox.left", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="right", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox.right", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bottom", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox.bottom", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="top", - full_name="google.cloud.videointelligence.v1beta1.BoundingBox.top", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1597, - serialized_end=1668, -) - - -_FACELOCATION = _descriptor.Descriptor( - name="FaceLocation", - full_name="google.cloud.videointelligence.v1beta1.FaceLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="bounding_box", - full_name="google.cloud.videointelligence.v1beta1.FaceLocation.bounding_box", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_offset", - full_name="google.cloud.videointelligence.v1beta1.FaceLocation.time_offset", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1670, - serialized_end=1780, -) - - -_FACEANNOTATION = _descriptor.Descriptor( - name="FaceAnnotation", - full_name="google.cloud.videointelligence.v1beta1.FaceAnnotation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="thumbnail", - full_name="google.cloud.videointelligence.v1beta1.FaceAnnotation.thumbnail", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="segments", 
- full_name="google.cloud.videointelligence.v1beta1.FaceAnnotation.segments", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="locations", - full_name="google.cloud.videointelligence.v1beta1.FaceAnnotation.locations", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1783, - serialized_end=1963, -) - - -_VIDEOANNOTATIONRESULTS = _descriptor.Descriptor( - name="VideoAnnotationResults", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="input_uri", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.input_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="label_annotations", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.label_annotations", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="face_annotations", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.face_annotations", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="shot_annotations", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.shot_annotations", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="safe_search_annotations", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.safe_search_annotations", - index=4, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="error", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationResults.error", - index=5, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1966, - serialized_end=2385, -) - - -_ANNOTATEVIDEORESPONSE = _descriptor.Descriptor( - 
name="AnnotateVideoResponse", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_results", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoResponse.annotation_results", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2387, - serialized_end=2502, -) - - -_VIDEOANNOTATIONPROGRESS = _descriptor.Descriptor( - name="VideoAnnotationProgress", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="input_uri", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress.input_uri", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=_b("").decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="progress_percent", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress.progress_percent", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="start_time", - 
full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress.start_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.cloud.videointelligence.v1beta1.VideoAnnotationProgress.update_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2505, - serialized_end=2672, -) - - -_ANNOTATEVIDEOPROGRESS = _descriptor.Descriptor( - name="AnnotateVideoProgress", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoProgress", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="annotation_progress", - full_name="google.cloud.videointelligence.v1beta1.AnnotateVideoProgress.annotation_progress", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2674, - serialized_end=2791, -) - -_ANNOTATEVIDEOREQUEST.fields_by_name["features"].enum_type = _FEATURE -_ANNOTATEVIDEOREQUEST.fields_by_name["video_context"].message_type = 
_VIDEOCONTEXT -_VIDEOCONTEXT.fields_by_name["segments"].message_type = _VIDEOSEGMENT -_VIDEOCONTEXT.fields_by_name["label_detection_mode"].enum_type = _LABELDETECTIONMODE -_LABELLOCATION.fields_by_name["segment"].message_type = _VIDEOSEGMENT -_LABELLOCATION.fields_by_name["level"].enum_type = _LABELLEVEL -_LABELANNOTATION.fields_by_name["locations"].message_type = _LABELLOCATION -_SAFESEARCHANNOTATION.fields_by_name["adult"].enum_type = _LIKELIHOOD -_SAFESEARCHANNOTATION.fields_by_name["spoof"].enum_type = _LIKELIHOOD -_SAFESEARCHANNOTATION.fields_by_name["medical"].enum_type = _LIKELIHOOD -_SAFESEARCHANNOTATION.fields_by_name["violent"].enum_type = _LIKELIHOOD -_SAFESEARCHANNOTATION.fields_by_name["racy"].enum_type = _LIKELIHOOD -_FACELOCATION.fields_by_name["bounding_box"].message_type = _BOUNDINGBOX -_FACEANNOTATION.fields_by_name["segments"].message_type = _VIDEOSEGMENT -_FACEANNOTATION.fields_by_name["locations"].message_type = _FACELOCATION -_VIDEOANNOTATIONRESULTS.fields_by_name[ - "label_annotations" -].message_type = _LABELANNOTATION -_VIDEOANNOTATIONRESULTS.fields_by_name[ - "face_annotations" -].message_type = _FACEANNOTATION -_VIDEOANNOTATIONRESULTS.fields_by_name["shot_annotations"].message_type = _VIDEOSEGMENT -_VIDEOANNOTATIONRESULTS.fields_by_name[ - "safe_search_annotations" -].message_type = _SAFESEARCHANNOTATION -_VIDEOANNOTATIONRESULTS.fields_by_name[ - "error" -].message_type = google_dot_rpc_dot_status__pb2._STATUS -_ANNOTATEVIDEORESPONSE.fields_by_name[ - "annotation_results" -].message_type = _VIDEOANNOTATIONRESULTS -_VIDEOANNOTATIONPROGRESS.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_VIDEOANNOTATIONPROGRESS.fields_by_name[ - "update_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_ANNOTATEVIDEOPROGRESS.fields_by_name[ - "annotation_progress" -].message_type = _VIDEOANNOTATIONPROGRESS -DESCRIPTOR.message_types_by_name["AnnotateVideoRequest"] = 
_ANNOTATEVIDEOREQUEST -DESCRIPTOR.message_types_by_name["VideoContext"] = _VIDEOCONTEXT -DESCRIPTOR.message_types_by_name["VideoSegment"] = _VIDEOSEGMENT -DESCRIPTOR.message_types_by_name["LabelLocation"] = _LABELLOCATION -DESCRIPTOR.message_types_by_name["LabelAnnotation"] = _LABELANNOTATION -DESCRIPTOR.message_types_by_name["SafeSearchAnnotation"] = _SAFESEARCHANNOTATION -DESCRIPTOR.message_types_by_name["BoundingBox"] = _BOUNDINGBOX -DESCRIPTOR.message_types_by_name["FaceLocation"] = _FACELOCATION -DESCRIPTOR.message_types_by_name["FaceAnnotation"] = _FACEANNOTATION -DESCRIPTOR.message_types_by_name["VideoAnnotationResults"] = _VIDEOANNOTATIONRESULTS -DESCRIPTOR.message_types_by_name["AnnotateVideoResponse"] = _ANNOTATEVIDEORESPONSE -DESCRIPTOR.message_types_by_name["VideoAnnotationProgress"] = _VIDEOANNOTATIONPROGRESS -DESCRIPTOR.message_types_by_name["AnnotateVideoProgress"] = _ANNOTATEVIDEOPROGRESS -DESCRIPTOR.enum_types_by_name["Feature"] = _FEATURE -DESCRIPTOR.enum_types_by_name["LabelLevel"] = _LABELLEVEL -DESCRIPTOR.enum_types_by_name["LabelDetectionMode"] = _LABELDETECTIONMODE -DESCRIPTOR.enum_types_by_name["Likelihood"] = _LIKELIHOOD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AnnotateVideoRequest = _reflection.GeneratedProtocolMessageType( - "AnnotateVideoRequest", - (_message.Message,), - dict( - DESCRIPTOR=_ANNOTATEVIDEOREQUEST, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video annotation request. - - - Attributes: - input_uri: - Input video location. Currently, only `Google Cloud Storage - `__ URIs are supported, - which must be specified in the following format: - ``gs://bucket-id/object-id`` (other URI formats return [google - .rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT - ]). For more information, see `Request URIs - `__. A video URI may include - wildcards in ``object-id``, and thus identify multiple videos. - Supported wildcards: '\*' to match 0 or more characters; '?' 
- to match 1 character. If unset, the input video should be - embedded in the request as ``input_content``. If set, - ``input_content`` should be unset. - input_content: - The video data bytes. Encoding: base64. If unset, the input - video(s) should be specified via ``input_uri``. If set, - ``input_uri`` should be unset. - features: - Requested video annotation features. - video_context: - Additional video context and/or feature-specific parameters. - output_uri: - Optional location where the output (in JSON format) should be - stored. Currently, only `Google Cloud Storage - `__ URIs are supported, - which must be specified in the following format: - ``gs://bucket-id/object-id`` (other URI formats return [google - .rpc.Code.INVALID\_ARGUMENT][google.rpc.Code.INVALID\_ARGUMENT - ]). For more information, see `Request URIs - `__. - location_id: - Optional cloud region where annotation should take place. - Supported cloud regions: ``us-east1``, ``us-west1``, ``europe- - west1``, ``asia-east1``. If no region is specified, a region - will be determined based on video file location. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoRequest) - ), -) -_sym_db.RegisterMessage(AnnotateVideoRequest) - -VideoContext = _reflection.GeneratedProtocolMessageType( - "VideoContext", - (_message.Message,), - dict( - DESCRIPTOR=_VIDEOCONTEXT, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video context and/or feature-specific parameters. - - - Attributes: - segments: - Video segments to annotate. The segments may overlap and are - not required to be contiguous or span the whole video. If - unspecified, each video is treated as a single segment. - label_detection_mode: - If label detection has been requested, what labels should be - detected in addition to video-level labels or segment-level - labels. If unspecified, defaults to ``SHOT_MODE``. 
- stationary_camera: - Whether the video has been shot from a stationary (i.e. non- - moving) camera. When set to true, might improve detection - accuracy for moving objects. - label_detection_model: - Model to use for label detection. Supported values: "latest" - and "stable" (the default). - face_detection_model: - Model to use for face detection. Supported values: "latest" - and "stable" (the default). - shot_change_detection_model: - Model to use for shot change detection. Supported values: - "latest" and "stable" (the default). - safe_search_detection_model: - Model to use for safe search detection. Supported values: - "latest" and "stable" (the default). - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoContext) - ), -) -_sym_db.RegisterMessage(VideoContext) - -VideoSegment = _reflection.GeneratedProtocolMessageType( - "VideoSegment", - (_message.Message,), - dict( - DESCRIPTOR=_VIDEOSEGMENT, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video segment. - - - Attributes: - start_time_offset: - Start offset in microseconds (inclusive). Unset means 0. - end_time_offset: - End offset in microseconds (inclusive). Unset means 0. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoSegment) - ), -) -_sym_db.RegisterMessage(VideoSegment) - -LabelLocation = _reflection.GeneratedProtocolMessageType( - "LabelLocation", - (_message.Message,), - dict( - DESCRIPTOR=_LABELLOCATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Label location. - - - Attributes: - segment: - Video segment. Set to [-1, -1] for video-level labels. Set to - [timestamp, timestamp] for frame-level labels. Otherwise, - corresponds to one of ``AnnotateSpec.segments`` (if specified) - or to shot boundaries (if requested). - confidence: - Confidence that the label is accurate. Range: [0, 1]. - level: - Label level. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.LabelLocation) - ), -) -_sym_db.RegisterMessage(LabelLocation) - -LabelAnnotation = _reflection.GeneratedProtocolMessageType( - "LabelAnnotation", - (_message.Message,), - dict( - DESCRIPTOR=_LABELANNOTATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Label annotation. - - - Attributes: - description: - Textual description, e.g. ``Fixed-gear bicycle``. - language_code: - Language code for ``description`` in BCP-47 format. - locations: - Where the label was detected and with what confidence. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.LabelAnnotation) - ), -) -_sym_db.RegisterMessage(LabelAnnotation) - -SafeSearchAnnotation = _reflection.GeneratedProtocolMessageType( - "SafeSearchAnnotation", - (_message.Message,), - dict( - DESCRIPTOR=_SAFESEARCHANNOTATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Safe search annotation (based on per-frame visual signals only). If no - unsafe content has been detected in a frame, no annotations are present - for that frame. If only some types of unsafe content have been detected - in a frame, the likelihood is set to ``UNKNOWN`` for all other types of - unsafe content. - - - Attributes: - adult: - Likelihood of adult content. - spoof: - Likelihood that an obvious modification was made to the - original version to make it appear funny or offensive. - medical: - Likelihood of medical content. - violent: - Likelihood of violent content. - racy: - Likelihood of racy content. - time_offset: - Video time offset in microseconds. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.SafeSearchAnnotation) - ), -) -_sym_db.RegisterMessage(SafeSearchAnnotation) - -BoundingBox = _reflection.GeneratedProtocolMessageType( - "BoundingBox", - (_message.Message,), - dict( - DESCRIPTOR=_BOUNDINGBOX, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Bounding box. - - - Attributes: - left: - Left X coordinate. - right: - Right X coordinate. - bottom: - Bottom Y coordinate. - top: - Top Y coordinate. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.BoundingBox) - ), -) -_sym_db.RegisterMessage(BoundingBox) - -FaceLocation = _reflection.GeneratedProtocolMessageType( - "FaceLocation", - (_message.Message,), - dict( - DESCRIPTOR=_FACELOCATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Face location. - - - Attributes: - bounding_box: - Bounding box in a frame. - time_offset: - Video time offset in microseconds. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.FaceLocation) - ), -) -_sym_db.RegisterMessage(FaceLocation) - -FaceAnnotation = _reflection.GeneratedProtocolMessageType( - "FaceAnnotation", - (_message.Message,), - dict( - DESCRIPTOR=_FACEANNOTATION, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Face annotation. - - - Attributes: - thumbnail: - Thumbnail of a representative face view (in JPEG format). - Encoding: base64. - segments: - All locations where a face was detected. Faces are detected - and tracked on a per-video basis (as opposed to across - multiple videos). - locations: - Face locations at one frame per second. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.FaceAnnotation) - ), -) -_sym_db.RegisterMessage(FaceAnnotation) - -VideoAnnotationResults = _reflection.GeneratedProtocolMessageType( - "VideoAnnotationResults", - (_message.Message,), - dict( - DESCRIPTOR=_VIDEOANNOTATIONRESULTS, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Annotation results for a single video. - - - Attributes: - input_uri: - Video file location in `Google Cloud Storage - `__. - label_annotations: - Label annotations. There is exactly one element for each - unique label. - face_annotations: - Face annotations. There is exactly one element for each unique - face. - shot_annotations: - Shot annotations. Each shot is represented as a video segment. - safe_search_annotations: - Safe search annotations. - error: - If set, indicates an error. Note that for a single - ``AnnotateVideoRequest`` some videos may succeed and some may - fail. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoAnnotationResults) - ), -) -_sym_db.RegisterMessage(VideoAnnotationResults) - -AnnotateVideoResponse = _reflection.GeneratedProtocolMessageType( - "AnnotateVideoResponse", - (_message.Message,), - dict( - DESCRIPTOR=_ANNOTATEVIDEORESPONSE, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video annotation response. Included in the ``response`` field of the - ``Operation`` returned by the ``GetOperation`` call of the - ``google::longrunning::Operations`` service. - - - Attributes: - annotation_results: - Annotation results for all videos specified in - ``AnnotateVideoRequest``. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoResponse) - ), -) -_sym_db.RegisterMessage(AnnotateVideoResponse) - -VideoAnnotationProgress = _reflection.GeneratedProtocolMessageType( - "VideoAnnotationProgress", - (_message.Message,), - dict( - DESCRIPTOR=_VIDEOANNOTATIONPROGRESS, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Annotation progress for a single video. - - - Attributes: - input_uri: - Video file location in `Google Cloud Storage - `__. - progress_percent: - Approximate percentage processed thus far. Guaranteed to be - 100 when fully processed. - start_time: - Time when the request was received. - update_time: - Time of the most recent update. - """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.VideoAnnotationProgress) - ), -) -_sym_db.RegisterMessage(VideoAnnotationProgress) - -AnnotateVideoProgress = _reflection.GeneratedProtocolMessageType( - "AnnotateVideoProgress", - (_message.Message,), - dict( - DESCRIPTOR=_ANNOTATEVIDEOPROGRESS, - __module__="google.cloud.videointelligence_v1beta1.proto.video_intelligence_pb2", - __doc__="""Video annotation progress. Included in the ``metadata`` field of the - ``Operation`` returned by the ``GetOperation`` call of the - ``google::longrunning::Operations`` service. - - - Attributes: - annotation_progress: - Progress metadata for all videos specified in - ``AnnotateVideoRequest``. 
- """, - # @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta1.AnnotateVideoProgress) - ), -) -_sym_db.RegisterMessage(AnnotateVideoProgress) - - -DESCRIPTOR._options = None - -_VIDEOINTELLIGENCESERVICE = _descriptor.ServiceDescriptor( - name="VideoIntelligenceService", - full_name="google.cloud.videointelligence.v1beta1.VideoIntelligenceService", - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=3257, - serialized_end=3431, - methods=[ - _descriptor.MethodDescriptor( - name="AnnotateVideo", - full_name="google.cloud.videointelligence.v1beta1.VideoIntelligenceService.AnnotateVideo", - index=0, - containing_service=None, - input_type=_ANNOTATEVIDEOREQUEST, - output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, - serialized_options=_b( - '\202\323\344\223\002\035"\030/v1beta1/videos:annotate:\001*' - ), - ) - ], -) -_sym_db.RegisterServiceDescriptor(_VIDEOINTELLIGENCESERVICE) - -DESCRIPTOR.services_by_name["VideoIntelligenceService"] = _VIDEOINTELLIGENCESERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2_grpc.py b/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2_grpc.py deleted file mode 100644 index a8a8f1125e20..000000000000 --- a/videointelligence/google/cloud/videointelligence_v1beta1/proto/video_intelligence_pb2_grpc.py +++ /dev/null @@ -1,56 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.videointelligence_v1beta1.proto import ( - video_intelligence_pb2 as google_dot_cloud_dot_videointelligence__v1beta1_dot_proto_dot_video__intelligence__pb2, -) -from google.longrunning import ( - operations_pb2 as google_dot_longrunning_dot_operations__pb2, -) - - -class VideoIntelligenceServiceStub(object): - """Service that implements Google Cloud Video Intelligence API. 
- """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.AnnotateVideo = channel.unary_unary( - "/google.cloud.videointelligence.v1beta1.VideoIntelligenceService/AnnotateVideo", - request_serializer=google_dot_cloud_dot_videointelligence__v1beta1_dot_proto_dot_video__intelligence__pb2.AnnotateVideoRequest.SerializeToString, - response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, - ) - - -class VideoIntelligenceServiceServicer(object): - """Service that implements Google Cloud Video Intelligence API. - """ - - def AnnotateVideo(self, request, context): - """Performs asynchronous video annotation. Progress and results can be - retrieved through the `google.longrunning.Operations` interface. - `Operation.metadata` contains `AnnotateVideoProgress` (progress). - `Operation.response` contains `AnnotateVideoResponse` (results). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_VideoIntelligenceServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "AnnotateVideo": grpc.unary_unary_rpc_method_handler( - servicer.AnnotateVideo, - request_deserializer=google_dot_cloud_dot_videointelligence__v1beta1_dot_proto_dot_video__intelligence__pb2.AnnotateVideoRequest.FromString, - response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, - ) - } - generic_handler = grpc.method_handlers_generic_handler( - "google.cloud.videointelligence.v1beta1.VideoIntelligenceService", - rpc_method_handlers, - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/videointelligence/synth.metadata b/videointelligence/synth.metadata index 1a3cca9bbf71..76a7f46ac8fd 100644 --- a/videointelligence/synth.metadata +++ b/videointelligence/synth.metadata @@ -1,40 +1,30 @@ { - "updateTime": "2019-08-07T12:41:23.743321Z", + "updateTime": 
"2019-10-28T21:37:18.071018Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "6d29882872298c8bfbaef33bd69bfca275c4d2eb", - "internalRef": "262019251" + "sha": "3fb0873b5f8a4cf5be4f848d20e5ccb2bdee1a67", + "internalRef": "277134185" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "videointelligence", - "apiVersion": "v1beta1", - "language": "python", - "generator": "gapic", - "config": "google/cloud/videointelligence/artman_videointelligence_v1beta1.yaml" - } - }, { "client": { "source": "googleapis", diff --git a/videointelligence/synth.py b/videointelligence/synth.py index 3e5d8389829c..8cb0ac1d98cb 100644 --- a/videointelligence/synth.py +++ b/videointelligence/synth.py @@ -20,7 +20,7 @@ gapic = gcp.GAPICGenerator() common = gcp.CommonTemplates() -versions = ["v1beta1", "v1beta2", "v1p1beta1", "v1p2beta1", "v1p3beta1", "v1"] +versions = ["v1beta2", "v1p1beta1", "v1p2beta1", "v1p3beta1", "v1"] # ---------------------------------------------------------------------------- diff --git a/videointelligence/tests/unit/gapic/v1/test_video_intelligence_service_client_v1.py b/videointelligence/tests/unit/gapic/v1/test_video_intelligence_service_client_v1.py index fca6c1e165bd..d34b37cf76b1 100644 --- a/videointelligence/tests/unit/gapic/v1/test_video_intelligence_service_client_v1.py +++ b/videointelligence/tests/unit/gapic/v1/test_video_intelligence_service_client_v1.py @@ -83,17 +83,17 @@ def test_annotate_video(self): client = 
videointelligence_v1.VideoIntelligenceServiceClient() # Setup Request - input_uri = "gs://cloud-samples-data/video/cat.mp4" features_element = enums.Feature.LABEL_DETECTION features = [features_element] + input_uri = "gs://cloud-samples-data/video/cat.mp4" - response = client.annotate_video(input_uri=input_uri, features=features) + response = client.annotate_video(features, input_uri=input_uri) result = response.result() assert expected_response == result assert len(channel.requests) == 1 expected_request = video_intelligence_pb2.AnnotateVideoRequest( - input_uri=input_uri, features=features + features=features, input_uri=input_uri ) actual_request = channel.requests[0][1] assert expected_request == actual_request @@ -114,10 +114,10 @@ def test_annotate_video_exception(self): client = videointelligence_v1.VideoIntelligenceServiceClient() # Setup Request - input_uri = "gs://cloud-samples-data/video/cat.mp4" features_element = enums.Feature.LABEL_DETECTION features = [features_element] + input_uri = "gs://cloud-samples-data/video/cat.mp4" - response = client.annotate_video(input_uri=input_uri, features=features) + response = client.annotate_video(features, input_uri=input_uri) exception = response.exception() assert exception.errors[0] == error diff --git a/vision/docs/conf.py b/vision/docs/conf.py index a88c250c6fcc..4fc985d1f432 100644 --- a/vision/docs/conf.py +++ b/vision/docs/conf.py @@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/vision/google/cloud/vision_v1/proto/image_annotator.proto 
b/vision/google/cloud/vision_v1/proto/image_annotator.proto index 6cff8dde4d51..d2588604605a 100644 --- a/vision/google/cloud/vision_v1/proto/image_annotator.proto +++ b/vision/google/cloud/vision_v1/proto/image_annotator.proto @@ -207,28 +207,6 @@ message Feature { string model = 3; } -// A bucketized representation of likelihood, which is intended to give clients -// highly stable results across model upgrades. -enum Likelihood { - // Unknown likelihood. - UNKNOWN = 0; - - // It is very unlikely. - VERY_UNLIKELY = 1; - - // It is unlikely. - UNLIKELY = 2; - - // It is possible. - POSSIBLE = 3; - - // It is likely. - LIKELY = 4; - - // It is very likely. - VERY_LIKELY = 5; -} - // External image source (Google Cloud Storage or web URL image location). message ImageSource { // **Use `image_uri` instead.** @@ -272,6 +250,28 @@ message Image { ImageSource source = 2; } +// A bucketized representation of likelihood, which is intended to give clients +// highly stable results across model upgrades. +enum Likelihood { + // Unknown likelihood. + UNKNOWN = 0; + + // It is very unlikely. + VERY_UNLIKELY = 1; + + // It is unlikely. + UNLIKELY = 2; + + // It is possible. + POSSIBLE = 3; + + // It is likely. + LIKELY = 4; + + // It is very likely. + VERY_LIKELY = 5; +} + // A face annotation object contains the results of face detection. message FaceAnnotation { // A face-specific landmark (for example, a face feature). 
diff --git a/vision/google/cloud/vision_v1/proto/product_search.proto b/vision/google/cloud/vision_v1/proto/product_search.proto index bede95be4687..6df694b8b763 100644 --- a/vision/google/cloud/vision_v1/proto/product_search.proto +++ b/vision/google/cloud/vision_v1/proto/product_search.proto @@ -21,6 +21,7 @@ import "google/cloud/vision/v1/geometry.proto"; import "google/cloud/vision/v1/product_search_service.proto"; import "google/protobuf/timestamp.proto"; import "google/api/annotations.proto"; +import "google/api/resource.proto"; option cc_enable_arenas = true; option go_package = "google.golang.org/genproto/googleapis/cloud/vision/v1;vision"; @@ -39,7 +40,9 @@ message ProductSearchParams { // // Format is: // `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`. - string product_set = 6; + string product_set = 6 [(google.api.resource_reference) = { + type: "vision.googleapis.com/ProductSet" + }]; // The list of product categories to search in. Currently, we only consider // the first category, and either "homegoods-v2", "apparel-v2", "toys-v2", diff --git a/vision/google/cloud/vision_v1/proto/product_search_pb2.py b/vision/google/cloud/vision_v1/proto/product_search_pb2.py index bb3e964bdf40..8797197d765f 100644 --- a/vision/google/cloud/vision_v1/proto/product_search_pb2.py +++ b/vision/google/cloud/vision_v1/proto/product_search_pb2.py @@ -23,6 +23,7 @@ ) from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -33,13 +34,14 @@ 
"\n\032com.google.cloud.vision.v1B\022ProductSearchProtoP\001Z"//v1/{parent=projects/*/locations/*}/productSets:\x0bproduct_set\xda\x41!parent,product_set,product_set_id\x12\xb4\x01\n\x0fListProductSets\x12..google.cloud.vision.v1.ListProductSetsRequest\x1a/.google.cloud.vision.v1.ListProductSetsResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v1/{parent=projects/*/locations/*}/productSets\xda\x41\x06parent\x12\xa1\x01\n\rGetProductSet\x12,.google.cloud.vision.v1.GetProductSetRequest\x1a".google.cloud.vision.v1.ProductSet">\x82\xd3\xe4\x93\x02\x31\x12//v1/{name=projects/*/locations/*/productSets/*}\xda\x41\x04name\x12\xd3\x01\n\x10UpdateProductSet\x12/.google.cloud.vision.v1.UpdateProductSetRequest\x1a".google.cloud.vision.v1.ProductSet"j\x82\xd3\xe4\x93\x02J2;/v1/{product_set.name=projects/*/locations/*/productSets/*}:\x0bproduct_set\xda\x41\x17product_set,update_mask\x12\x9b\x01\n\x10\x44\x65leteProductSet\x12/.google.cloud.vision.v1.DeleteProductSetRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v1/{name=projects/*/locations/*/productSets/*}\xda\x41\x04name\x12\xb9\x01\n\rCreateProduct\x12,.google.cloud.vision.v1.CreateProductRequest\x1a\x1f.google.cloud.vision.v1.Product"Y\x82\xd3\xe4\x93\x02\x37",/v1/{parent=projects/*/locations/*}/products:\x07product\xda\x41\x19parent,product,product_id\x12\xa8\x01\n\x0cListProducts\x12+.google.cloud.vision.v1.ListProductsRequest\x1a,.google.cloud.vision.v1.ListProductsResponse"=\x82\xd3\xe4\x93\x02.\x12,/v1/{parent=projects/*/locations/*}/products\xda\x41\x06parent\x12\x95\x01\n\nGetProduct\x12).google.cloud.vision.v1.GetProductRequest\x1a\x1f.google.cloud.vision.v1.Product";\x82\xd3\xe4\x93\x02.\x12,/v1/{name=projects/*/locations/*/products/*}\xda\x41\x04name\x12\xbb\x01\n\rUpdateProduct\x12,.google.cloud.vision.v1.UpdateProductRequest\x1a\x1f.google.cloud.vision.v1.Product"[\x82\xd3\xe4\x93\x02?24/v1/{product.name=projects/*/locations/*/products/*}:\x07product\xda\x41\x13product,update_mask\x12\x92\x01\n\rDeletePr
oduct\x12,.google.cloud.vision.v1.DeleteProductRequest\x1a\x16.google.protobuf.Empty";\x82\xd3\xe4\x93\x02.*,/v1/{name=projects/*/locations/*/products/*}\xda\x41\x04name\x12\xf9\x01\n\x14\x43reateReferenceImage\x12\x33.google.cloud.vision.v1.CreateReferenceImageRequest\x1a&.google.cloud.vision.v1.ReferenceImage"\x83\x01\x82\xd3\xe4\x93\x02Q">/v1/{parent=projects/*/locations/*/products/*}/referenceImages:\x0freference_image\xda\x41)parent,reference_image,reference_image_id\x12\xb2\x01\n\x14\x44\x65leteReferenceImage\x12\x33.google.cloud.vision.v1.DeleteReferenceImageRequest\x1a\x16.google.protobuf.Empty"M\x82\xd3\xe4\x93\x02@*>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\xda\x41\x04name\x12\xcf\x01\n\x13ListReferenceImages\x12\x32.google.cloud.vision.v1.ListReferenceImagesRequest\x1a\x33.google.cloud.vision.v1.ListReferenceImagesResponse"O\x82\xd3\xe4\x93\x02@\x12>/v1/{parent=projects/*/locations/*/products/*}/referenceImages\xda\x41\x06parent\x12\xb5\x01\n\x11GetReferenceImage\x12\x30.google.cloud.vision.v1.GetReferenceImageRequest\x1a&.google.cloud.vision.v1.ReferenceImage"F\x82\xd3\xe4\x93\x02@\x12>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\x12\xbd\x01\n\x16\x41\x64\x64ProductToProductSet\x12\x35.google.cloud.vision.v1.AddProductToProductSetRequest\x1a\x16.google.protobuf.Empty"T\x82\xd3\xe4\x93\x02?":/v1/{name=projects/*/locations/*/productSets/*}:addProduct:\x01*\xda\x41\x0cname,product\x12\xca\x01\n\x1bRemoveProductFromProductSet\x12:.google.cloud.vision.v1.RemoveProductFromProductSetRequest\x1a\x16.google.protobuf.Empty"W\x82\xd3\xe4\x93\x02\x42"=/v1/{name=projects/*/locations/*/productSets/*}:removeProduct:\x01*\xda\x41\x0cname,product\x12\xd6\x01\n\x18ListProductsInProductSet\x12\x37.google.cloud.vision.v1.ListProductsInProductSetRequest\x1a\x38.google.cloud.vision.v1.ListProductsInProductSetResponse"G\x82\xd3\xe4\x93\x02:\x12\x38/v1/{name=projects/*/locations/*/productSets/*}/products\xda\x41\x04name\x12\xf4\x01\n\x11Im
portProductSets\x12\x30.google.cloud.vision.v1.ImportProductSetsRequest\x1a\x1d.google.longrunning.Operation"\x8d\x01\x82\xd3\xe4\x93\x02;"6/v1/{parent=projects/*/locations/*}/productSets:import:\x01*\xda\x41\x13parent,input_config\xca\x41\x33\n\x19ImportProductSetsResponse\x12\x16\x42\x61tchOperationMetadata\x12\xd6\x01\n\rPurgeProducts\x12,.google.cloud.vision.v1.PurgeProductsRequest\x1a\x1d.google.longrunning.Operation"x\x82\xd3\xe4\x93\x02\x37"2/v1/{parent=projects/*/locations/*}/products:purge:\x01*\xda\x41\x06parent\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x42\x61tchOperationMetadata\x1av\xca\x41\x15vision.googleapis.com\xd2\x41[https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-visionB\x81\x01\n\x1a\x63om.google.cloud.vision.v1B\x19ProductSearchServiceProtoP\x01Z"//v1/{parent=projects/*/locations/*}/productSets:\x0bproduct_set\xda\x41!parent,product_set,product_set_id\x12\xb4\x01\n\x0fListProductSets\x12..google.cloud.vision.v1.ListProductSetsRequest\x1a/.google.cloud.vision.v1.ListProductSetsResponse"@\x82\xd3\xe4\x93\x02\x31\x12//v1/{parent=projects/*/locations/*}/productSets\xda\x41\x06parent\x12\xa1\x01\n\rGetProductSet\x12,.google.cloud.vision.v1.GetProductSetRequest\x1a".google.cloud.vision.v1.ProductSet">\x82\xd3\xe4\x93\x02\x31\x12//v1/{name=projects/*/locations/*/productSets/*}\xda\x41\x04name\x12\xd3\x01\n\x10UpdateProductSet\x12/.google.cloud.vision.v1.UpdateProductSetRequest\x1a".google.cloud.vision.v1.ProductSet"j\x82\xd3\xe4\x93\x02J2;/v1/{product_set.name=projects/*/locations/*/productSets/*}:\x0bproduct_set\xda\x41\x17product_set,update_mask\x12\x9b\x01\n\x10\x44\x65leteProductSet\x12/.google.cloud.vision.v1.DeleteProductSetRequest\x1a\x16.google.protobuf.Empty">\x82\xd3\xe4\x93\x02\x31*//v1/{name=projects/*/locations/*/productSets/*}\xda\x41\x04name\x12\xb9\x01\n\rCreateProduct\x12,.google.cloud.vision.v1.CreateProductRequest\x1a\x1f.google.cloud.vision.v1.Product"Y\x82\xd3\xe4\x93\x02\x37",/v1/{parent=pro
jects/*/locations/*}/products:\x07product\xda\x41\x19parent,product,product_id\x12\xa8\x01\n\x0cListProducts\x12+.google.cloud.vision.v1.ListProductsRequest\x1a,.google.cloud.vision.v1.ListProductsResponse"=\x82\xd3\xe4\x93\x02.\x12,/v1/{parent=projects/*/locations/*}/products\xda\x41\x06parent\x12\x95\x01\n\nGetProduct\x12).google.cloud.vision.v1.GetProductRequest\x1a\x1f.google.cloud.vision.v1.Product";\x82\xd3\xe4\x93\x02.\x12,/v1/{name=projects/*/locations/*/products/*}\xda\x41\x04name\x12\xbb\x01\n\rUpdateProduct\x12,.google.cloud.vision.v1.UpdateProductRequest\x1a\x1f.google.cloud.vision.v1.Product"[\x82\xd3\xe4\x93\x02?24/v1/{product.name=projects/*/locations/*/products/*}:\x07product\xda\x41\x13product,update_mask\x12\x92\x01\n\rDeleteProduct\x12,.google.cloud.vision.v1.DeleteProductRequest\x1a\x16.google.protobuf.Empty";\x82\xd3\xe4\x93\x02.*,/v1/{name=projects/*/locations/*/products/*}\xda\x41\x04name\x12\xf9\x01\n\x14\x43reateReferenceImage\x12\x33.google.cloud.vision.v1.CreateReferenceImageRequest\x1a&.google.cloud.vision.v1.ReferenceImage"\x83\x01\x82\xd3\xe4\x93\x02Q">/v1/{parent=projects/*/locations/*/products/*}/referenceImages:\x0freference_image\xda\x41)parent,reference_image,reference_image_id\x12\xb2\x01\n\x14\x44\x65leteReferenceImage\x12\x33.google.cloud.vision.v1.DeleteReferenceImageRequest\x1a\x16.google.protobuf.Empty"M\x82\xd3\xe4\x93\x02@*>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\xda\x41\x04name\x12\xcf\x01\n\x13ListReferenceImages\x12\x32.google.cloud.vision.v1.ListReferenceImagesRequest\x1a\x33.google.cloud.vision.v1.ListReferenceImagesResponse"O\x82\xd3\xe4\x93\x02@\x12>/v1/{parent=projects/*/locations/*/products/*}/referenceImages\xda\x41\x06parent\x12\xbc\x01\n\x11GetReferenceImage\x12\x30.google.cloud.vision.v1.GetReferenceImageRequest\x1a&.google.cloud.vision.v1.ReferenceImage"M\x82\xd3\xe4\x93\x02@\x12>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\xda\x41\x04name\x12\xbd\x01\n\x16\x41\x64\x64P
roductToProductSet\x12\x35.google.cloud.vision.v1.AddProductToProductSetRequest\x1a\x16.google.protobuf.Empty"T\x82\xd3\xe4\x93\x02?":/v1/{name=projects/*/locations/*/productSets/*}:addProduct:\x01*\xda\x41\x0cname,product\x12\xca\x01\n\x1bRemoveProductFromProductSet\x12:.google.cloud.vision.v1.RemoveProductFromProductSetRequest\x1a\x16.google.protobuf.Empty"W\x82\xd3\xe4\x93\x02\x42"=/v1/{name=projects/*/locations/*/productSets/*}:removeProduct:\x01*\xda\x41\x0cname,product\x12\xd6\x01\n\x18ListProductsInProductSet\x12\x37.google.cloud.vision.v1.ListProductsInProductSetRequest\x1a\x38.google.cloud.vision.v1.ListProductsInProductSetResponse"G\x82\xd3\xe4\x93\x02:\x12\x38/v1/{name=projects/*/locations/*/productSets/*}/products\xda\x41\x04name\x12\xf4\x01\n\x11ImportProductSets\x12\x30.google.cloud.vision.v1.ImportProductSetsRequest\x1a\x1d.google.longrunning.Operation"\x8d\x01\x82\xd3\xe4\x93\x02;"6/v1/{parent=projects/*/locations/*}/productSets:import:\x01*\xda\x41\x13parent,input_config\xca\x41\x33\n\x19ImportProductSetsResponse\x12\x16\x42\x61tchOperationMetadata\x12\xd6\x01\n\rPurgeProducts\x12,.google.cloud.vision.v1.PurgeProductsRequest\x1a\x1d.google.longrunning.Operation"x\x82\xd3\xe4\x93\x02\x37"2/v1/{parent=projects/*/locations/*}/products:purge:\x01*\xda\x41\x06parent\xca\x41/\n\x15google.protobuf.Empty\x12\x16\x42\x61tchOperationMetadata\x1av\xca\x41\x15vision.googleapis.com\xd2\x41[https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/cloud-visionB\x81\x01\n\x1a\x63om.google.cloud.vision.v1B\x19ProductSearchServiceProtoP\x01Z/v1/{name=projects/*/locations/*/products/*/referenceImages/*}" + "\202\323\344\223\002@\022>/v1/{name=projects/*/locations/*/products/*/referenceImages/*}\332A\004name" ), ), _descriptor.MethodDescriptor( diff --git a/vision/synth.metadata b/vision/synth.metadata index a57547dc7f78..04fdeb1da711 100644 --- a/vision/synth.metadata +++ b/vision/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": 
"2019-09-14T12:43:40.716183Z", + "updateTime": "2019-10-10T12:48:24.197145Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.2", - "dockerImage": "googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "6b2ba2ae3124c22ecb56af7102c78110b8576671", - "internalRef": "268974829" + "sha": "10f91fa12f70e8e0209a45fc10807ed1f77c7e4e", + "internalRef": "273826591" } }, { diff --git a/webrisk/docs/conf.py b/webrisk/docs/conf.py index 0f261df6f20a..0b6fd1a76b85 100644 --- a/webrisk/docs/conf.py +++ b/webrisk/docs/conf.py @@ -264,7 +264,7 @@ u"google-cloud-webrisk Documentation", author, "manual", - ) + ), ] # The name of an image file (relative to this directory) to place at the top of @@ -320,7 +320,7 @@ "google-cloud-webrisk", "GAPIC library for the {metadata.shortName} v1beta1 service", "APIs", - ) + ), ] # Documents to append as an appendix to all manuals. 
@@ -344,7 +344,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), + "requests": ("https://requests.kennethreitz.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } diff --git a/webrisk/google/cloud/webrisk.py b/webrisk/google/cloud/webrisk.py index 6b9be6efb038..5ed7f8340456 100644 --- a/webrisk/google/cloud/webrisk.py +++ b/webrisk/google/cloud/webrisk.py @@ -22,4 +22,8 @@ from google.cloud.webrisk_v1beta1 import types -__all__ = ("enums", "types", "WebRiskServiceV1Beta1Client") +__all__ = ( + "enums", + "types", + "WebRiskServiceV1Beta1Client", +) diff --git a/webrisk/google/cloud/webrisk_v1beta1/__init__.py b/webrisk/google/cloud/webrisk_v1beta1/__init__.py index a02bc357affc..823fd3656c38 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/__init__.py +++ b/webrisk/google/cloud/webrisk_v1beta1/__init__.py @@ -29,4 +29,8 @@ class WebRiskServiceV1Beta1Client( enums = enums -__all__ = ("enums", "types", "WebRiskServiceV1Beta1Client") +__all__ = ( + "enums", + "types", + "WebRiskServiceV1Beta1Client", +) diff --git a/webrisk/google/cloud/webrisk_v1beta1/gapic/transports/web_risk_service_v1_beta1_grpc_transport.py b/webrisk/google/cloud/webrisk_v1beta1/gapic/transports/web_risk_service_v1_beta1_grpc_transport.py index 773c0afc9601..8e78eafa37b0 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/gapic/transports/web_risk_service_v1_beta1_grpc_transport.py +++ b/webrisk/google/cloud/webrisk_v1beta1/gapic/transports/web_risk_service_v1_beta1_grpc_transport.py @@ -53,7 +53,7 @@ def __init__( # exception (channels come with credentials baked in already). 
if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -74,7 +74,7 @@ def __init__( self._stubs = { "web_risk_service_v1_beta1_stub": webrisk_pb2_grpc.WebRiskServiceV1Beta1Stub( channel - ) + ), } @classmethod diff --git a/webrisk/google/cloud/webrisk_v1beta1/gapic/web_risk_service_v1_beta1_client.py b/webrisk/google/cloud/webrisk_v1beta1/gapic/web_risk_service_v1_beta1_client.py index dfe02613188a..66e5179318f5 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/gapic/web_risk_service_v1_beta1_client.py +++ b/webrisk/google/cloud/webrisk_v1beta1/gapic/web_risk_service_v1_beta1_client.py @@ -36,7 +36,7 @@ from google.cloud.webrisk_v1beta1.proto import webrisk_pb2_grpc -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-webrisk").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-webrisk",).version class WebRiskServiceV1Beta1Client(object): @@ -159,12 +159,12 @@ def __init__( self.transport = transport else: self.transport = web_risk_service_v1_beta1_grpc_transport.WebRiskServiceV1Beta1GrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -175,7 +175,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -212,8 +212,8 @@ def compute_threat_list_diff( >>> response = client.compute_threat_list_diff(threat_type, constraints) Args: - threat_type (~google.cloud.webrisk_v1beta1.types.ThreatType): Required. The ThreatList to update. - constraints (Union[dict, ~google.cloud.webrisk_v1beta1.types.Constraints]): The constraints associated with this request. + threat_type (~google.cloud.webrisk_v1beta1.types.ThreatType): The ThreatList to update. + constraints (Union[dict, ~google.cloud.webrisk_v1beta1.types.Constraints]): Required. The constraints associated with this request. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.webrisk_v1beta1.types.Constraints` @@ -284,7 +284,7 @@ def search_uris( >>> response = client.search_uris(uri, threat_types) Args: - uri (str): The URI to be checked for matches. + uri (str): Required. The URI to be checked for matches. threat_types (list[~google.cloud.webrisk_v1beta1.types.ThreatType]): Required. The ThreatLists to search in. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will @@ -316,7 +316,7 @@ def search_uris( client_info=self._client_info, ) - request = webrisk_pb2.SearchUrisRequest(uri=uri, threat_types=threat_types) + request = webrisk_pb2.SearchUrisRequest(uri=uri, threat_types=threat_types,) return self._inner_api_calls["search_uris"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -382,7 +382,7 @@ def search_hashes( ) request = webrisk_pb2.SearchHashesRequest( - threat_types=threat_types, hash_prefix=hash_prefix + threat_types=threat_types, hash_prefix=hash_prefix, ) return self._inner_api_calls["search_hashes"]( request, retry=retry, timeout=timeout, metadata=metadata diff --git a/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk.proto b/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk.proto index c6b5f7c90ce8..7f8021d16b3e 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk.proto +++ b/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk.proto @@ -18,6 +18,8 @@ syntax = "proto3"; package google.cloud.webrisk.v1beta1; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option csharp_namespace = "Google.Cloud.WebRisk.V1Beta1"; @@ -31,12 +33,15 @@ option php_namespace = "Google\\Cloud\\WebRisk\\V1beta1"; // Web Risk v1beta1 API defines an interface to detect malicious URLs on your // website and in client applications. service WebRiskServiceV1Beta1 { + option (google.api.default_host) = "webrisk.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Gets the most recent threat list diffs. 
- rpc ComputeThreatListDiff(ComputeThreatListDiffRequest) - returns (ComputeThreatListDiffResponse) { + rpc ComputeThreatListDiff(ComputeThreatListDiffRequest) returns (ComputeThreatListDiffResponse) { option (google.api.http) = { get: "/v1beta1/threatLists:computeDiff" }; + option (google.api.method_signature) = "threat_type,version_token,constraints"; } // This method is used to check whether a URI is on a given threatList. @@ -44,6 +49,7 @@ service WebRiskServiceV1Beta1 { option (google.api.http) = { get: "/v1beta1/uris:search" }; + option (google.api.method_signature) = "uri,threat_types"; } // Gets the full hashes that match the requested hash prefix. @@ -55,6 +61,7 @@ service WebRiskServiceV1Beta1 { option (google.api.http) = { get: "/v1beta1/hashes:search" }; + option (google.api.method_signature) = "hash_prefix,threat_types"; } } @@ -76,15 +83,15 @@ message ComputeThreatListDiffRequest { repeated CompressionType supported_compressions = 3; } - // Required. The ThreatList to update. - ThreatType threat_type = 1; + // The ThreatList to update. + ThreatType threat_type = 1 [(google.api.field_behavior) = REQUIRED]; // The current version token of the client for the requested list (the // client version that was received from the last successful diff). bytes version_token = 2; - // The constraints associated with this request. - Constraints constraints = 3; + // Required. The constraints associated with this request. + Constraints constraints = 3 [(google.api.field_behavior) = REQUIRED]; } message ComputeThreatListDiffResponse { @@ -109,7 +116,7 @@ message ComputeThreatListDiffResponse { RESET = 2; } - // The type of response. This may indicate that an action is required by the + // The type of response. This may indicate that an action must be taken by the // client when the response is received. ResponseType response_type = 4; @@ -138,11 +145,11 @@ message ComputeThreatListDiffResponse { // Request to check URI entries against threatLists. 
message SearchUrisRequest { - // The URI to be checked for matches. - string uri = 1; + // Required. The URI to be checked for matches. + string uri = 1 [(google.api.field_behavior) = REQUIRED]; // Required. The ThreatLists to search in. - repeated ThreatType threat_types = 2; + repeated ThreatType threat_types = 2 [(google.api.field_behavior) = REQUIRED]; } message SearchUrisResponse { @@ -167,7 +174,7 @@ message SearchHashesRequest { bytes hash_prefix = 1; // Required. The ThreatLists to search in. - repeated ThreatType threat_types = 2; + repeated ThreatType threat_types = 2 [(google.api.field_behavior) = REQUIRED]; } message SearchHashesResponse { diff --git a/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk_pb2.py b/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk_pb2.py index 7b61ac5aa53a..c4e7f392a921 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk_pb2.py +++ b/webrisk/google/cloud/webrisk_v1beta1/proto/webrisk_pb2.py @@ -17,6 +17,8 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -28,10 +30,12 @@ "\n\032com.google.webrisk.v1beta1B\014WebRiskProtoP\001ZCgoogle.golang.org/genproto/googleapis/cloud/webrisk/v1beta1;webrisk\242\002\004GCWR\252\002\034Google.Cloud.WebRisk.V1Beta1\312\002\034Google\\Cloud\\WebRisk\\V1beta1" ), serialized_pb=_b( - '\n0google/cloud/webrisk_v1beta1/proto/webrisk.proto\x12\x1cgoogle.cloud.webrisk.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe8\x02\n\x1c\x43omputeThreatListDiffRequest\x12=\n\x0bthreat_type\x18\x01 \x01(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12\x15\n\rversion_token\x18\x02 \x01(\x0c\x12[\n\x0b\x63onstraints\x18\x03 
\x01(\x0b\x32\x46.google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest.Constraints\x1a\x94\x01\n\x0b\x43onstraints\x12\x18\n\x10max_diff_entries\x18\x01 \x01(\x05\x12\x1c\n\x14max_database_entries\x18\x02 \x01(\x05\x12M\n\x16supported_compressions\x18\x03 \x03(\x0e\x32-.google.cloud.webrisk.v1beta1.CompressionType"\x9a\x04\n\x1d\x43omputeThreatListDiffResponse\x12_\n\rresponse_type\x18\x04 \x01(\x0e\x32H.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.ResponseType\x12\x45\n\tadditions\x18\x05 \x01(\x0b\x32\x32.google.cloud.webrisk.v1beta1.ThreatEntryAdditions\x12\x43\n\x08removals\x18\x06 \x01(\x0b\x32\x31.google.cloud.webrisk.v1beta1.ThreatEntryRemovals\x12\x19\n\x11new_version_token\x18\x07 \x01(\x0c\x12V\n\x08\x63hecksum\x18\x08 \x01(\x0b\x32\x44.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.Checksum\x12\x39\n\x15recommended_next_diff\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x1a\n\x08\x43hecksum\x12\x0e\n\x06sha256\x18\x01 \x01(\x0c"B\n\x0cResponseType\x12\x1d\n\x19RESPONSE_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x44IFF\x10\x01\x12\t\n\x05RESET\x10\x02"`\n\x11SearchUrisRequest\x12\x0b\n\x03uri\x18\x01 \x01(\t\x12>\n\x0cthreat_types\x18\x02 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType"\xde\x01\n\x12SearchUrisResponse\x12J\n\x06threat\x18\x01 \x01(\x0b\x32:.google.cloud.webrisk.v1beta1.SearchUrisResponse.ThreatUri\x1a|\n\tThreatUri\x12>\n\x0cthreat_types\x18\x01 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"j\n\x13SearchHashesRequest\x12\x13\n\x0bhash_prefix\x18\x01 \x01(\x0c\x12>\n\x0cthreat_types\x18\x02 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType"\xae\x02\n\x14SearchHashesResponse\x12N\n\x07threats\x18\x01 \x03(\x0b\x32=.google.cloud.webrisk.v1beta1.SearchHashesResponse.ThreatHash\x12\x38\n\x14negative_expire_time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x8b\x01\n\nThreatHash\x12>\n\x0cthreat_types\x18\x01 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12\x0c\n\x04hash\x18\x02 \x01(\x0c\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x99\x01\n\x14ThreatEntryAdditions\x12;\n\nraw_hashes\x18\x01 \x03(\x0b\x32\'.google.cloud.webrisk.v1beta1.RawHashes\x12\x44\n\x0brice_hashes\x18\x02 \x01(\x0b\x32/.google.cloud.webrisk.v1beta1.RiceDeltaEncoding"\x9b\x01\n\x13ThreatEntryRemovals\x12=\n\x0braw_indices\x18\x01 \x01(\x0b\x32(.google.cloud.webrisk.v1beta1.RawIndices\x12\x45\n\x0crice_indices\x18\x02 \x01(\x0b\x32/.google.cloud.webrisk.v1beta1.RiceDeltaEncoding"\x1d\n\nRawIndices\x12\x0f\n\x07indices\x18\x01 \x03(\x05"4\n\tRawHashes\x12\x13\n\x0bprefix_size\x18\x01 \x01(\x05\x12\x12\n\nraw_hashes\x18\x02 \x01(\x0c"k\n\x11RiceDeltaEncoding\x12\x13\n\x0b\x66irst_value\x18\x01 \x01(\x03\x12\x16\n\x0erice_parameter\x18\x02 \x01(\x05\x12\x13\n\x0b\x65ntry_count\x18\x03 \x01(\x05\x12\x14\n\x0c\x65ncoded_data\x18\x04 \x01(\x0c*e\n\nThreatType\x12\x1b\n\x17THREAT_TYPE_UNSPECIFIED\x10\x00\x12\x0b\n\x07MALWARE\x10\x01\x12\x16\n\x12SOCIAL_ENGINEERING\x10\x02\x12\x15\n\x11UNWANTED_SOFTWARE\x10\x03*F\n\x0f\x43ompressionType\x12 \n\x1c\x43OMPRESSION_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03RAW\x10\x01\x12\x08\n\x04RICE\x10\x02\x32\xfc\x03\n\x15WebRiskServiceV1Beta1\x12\xba\x01\n\x15\x43omputeThreatListDiff\x12:.google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest\x1a;.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse"(\x82\xd3\xe4\x93\x02"\x12 
/v1beta1/threatLists:computeDiff\x12\x8d\x01\n\nSearchUris\x12/.google.cloud.webrisk.v1beta1.SearchUrisRequest\x1a\x30.google.cloud.webrisk.v1beta1.SearchUrisResponse"\x1c\x82\xd3\xe4\x93\x02\x16\x12\x14/v1beta1/uris:search\x12\x95\x01\n\x0cSearchHashes\x12\x31.google.cloud.webrisk.v1beta1.SearchHashesRequest\x1a\x32.google.cloud.webrisk.v1beta1.SearchHashesResponse"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/v1beta1/hashes:searchB\xb6\x01\n\x1a\x63om.google.webrisk.v1beta1B\x0cWebRiskProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/webrisk/v1beta1;webrisk\xa2\x02\x04GCWR\xaa\x02\x1cGoogle.Cloud.WebRisk.V1Beta1\xca\x02\x1cGoogle\\Cloud\\WebRisk\\V1beta1b\x06proto3' + '\n0google/cloud/webrisk_v1beta1/proto/webrisk.proto\x12\x1cgoogle.cloud.webrisk.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xf2\x02\n\x1c\x43omputeThreatListDiffRequest\x12\x42\n\x0bthreat_type\x18\x01 \x01(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatTypeB\x03\xe0\x41\x02\x12\x15\n\rversion_token\x18\x02 \x01(\x0c\x12`\n\x0b\x63onstraints\x18\x03 \x01(\x0b\x32\x46.google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest.ConstraintsB\x03\xe0\x41\x02\x1a\x94\x01\n\x0b\x43onstraints\x12\x18\n\x10max_diff_entries\x18\x01 \x01(\x05\x12\x1c\n\x14max_database_entries\x18\x02 \x01(\x05\x12M\n\x16supported_compressions\x18\x03 \x03(\x0e\x32-.google.cloud.webrisk.v1beta1.CompressionType"\x9a\x04\n\x1d\x43omputeThreatListDiffResponse\x12_\n\rresponse_type\x18\x04 \x01(\x0e\x32H.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.ResponseType\x12\x45\n\tadditions\x18\x05 \x01(\x0b\x32\x32.google.cloud.webrisk.v1beta1.ThreatEntryAdditions\x12\x43\n\x08removals\x18\x06 \x01(\x0b\x32\x31.google.cloud.webrisk.v1beta1.ThreatEntryRemovals\x12\x19\n\x11new_version_token\x18\x07 \x01(\x0c\x12V\n\x08\x63hecksum\x18\x08 
\x01(\x0b\x32\x44.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse.Checksum\x12\x39\n\x15recommended_next_diff\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x1a\n\x08\x43hecksum\x12\x0e\n\x06sha256\x18\x01 \x01(\x0c"B\n\x0cResponseType\x12\x1d\n\x19RESPONSE_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04\x44IFF\x10\x01\x12\t\n\x05RESET\x10\x02"j\n\x11SearchUrisRequest\x12\x10\n\x03uri\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x43\n\x0cthreat_types\x18\x02 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatTypeB\x03\xe0\x41\x02"\xde\x01\n\x12SearchUrisResponse\x12J\n\x06threat\x18\x01 \x01(\x0b\x32:.google.cloud.webrisk.v1beta1.SearchUrisResponse.ThreatUri\x1a|\n\tThreatUri\x12>\n\x0cthreat_types\x18\x01 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12/\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"o\n\x13SearchHashesRequest\x12\x13\n\x0bhash_prefix\x18\x01 \x01(\x0c\x12\x43\n\x0cthreat_types\x18\x02 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatTypeB\x03\xe0\x41\x02"\xae\x02\n\x14SearchHashesResponse\x12N\n\x07threats\x18\x01 \x03(\x0b\x32=.google.cloud.webrisk.v1beta1.SearchHashesResponse.ThreatHash\x12\x38\n\x14negative_expire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x8b\x01\n\nThreatHash\x12>\n\x0cthreat_types\x18\x01 \x03(\x0e\x32(.google.cloud.webrisk.v1beta1.ThreatType\x12\x0c\n\x04hash\x18\x02 \x01(\x0c\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\x99\x01\n\x14ThreatEntryAdditions\x12;\n\nraw_hashes\x18\x01 \x03(\x0b\x32\'.google.cloud.webrisk.v1beta1.RawHashes\x12\x44\n\x0brice_hashes\x18\x02 \x01(\x0b\x32/.google.cloud.webrisk.v1beta1.RiceDeltaEncoding"\x9b\x01\n\x13ThreatEntryRemovals\x12=\n\x0braw_indices\x18\x01 \x01(\x0b\x32(.google.cloud.webrisk.v1beta1.RawIndices\x12\x45\n\x0crice_indices\x18\x02 \x01(\x0b\x32/.google.cloud.webrisk.v1beta1.RiceDeltaEncoding"\x1d\n\nRawIndices\x12\x0f\n\x07indices\x18\x01 
\x03(\x05"4\n\tRawHashes\x12\x13\n\x0bprefix_size\x18\x01 \x01(\x05\x12\x12\n\nraw_hashes\x18\x02 \x01(\x0c"k\n\x11RiceDeltaEncoding\x12\x13\n\x0b\x66irst_value\x18\x01 \x01(\x03\x12\x16\n\x0erice_parameter\x18\x02 \x01(\x05\x12\x13\n\x0b\x65ntry_count\x18\x03 \x01(\x05\x12\x14\n\x0c\x65ncoded_data\x18\x04 \x01(\x0c*e\n\nThreatType\x12\x1b\n\x17THREAT_TYPE_UNSPECIFIED\x10\x00\x12\x0b\n\x07MALWARE\x10\x01\x12\x16\n\x12SOCIAL_ENGINEERING\x10\x02\x12\x15\n\x11UNWANTED_SOFTWARE\x10\x03*F\n\x0f\x43ompressionType\x12 \n\x1c\x43OMPRESSION_TYPE_UNSPECIFIED\x10\x00\x12\x07\n\x03RAW\x10\x01\x12\x08\n\x04RICE\x10\x02\x32\x9e\x05\n\x15WebRiskServiceV1Beta1\x12\xe2\x01\n\x15\x43omputeThreatListDiff\x12:.google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest\x1a;.google.cloud.webrisk.v1beta1.ComputeThreatListDiffResponse"P\x82\xd3\xe4\x93\x02"\x12 /v1beta1/threatLists:computeDiff\xda\x41%threat_type,version_token,constraints\x12\xa0\x01\n\nSearchUris\x12/.google.cloud.webrisk.v1beta1.SearchUrisRequest\x1a\x30.google.cloud.webrisk.v1beta1.SearchUrisResponse"/\x82\xd3\xe4\x93\x02\x16\x12\x14/v1beta1/uris:search\xda\x41\x10uri,threat_types\x12\xb0\x01\n\x0cSearchHashes\x12\x31.google.cloud.webrisk.v1beta1.SearchHashesRequest\x1a\x32.google.cloud.webrisk.v1beta1.SearchHashesResponse"9\x82\xd3\xe4\x93\x02\x18\x12\x16/v1beta1/hashes:search\xda\x41\x18hash_prefix,threat_types\x1aJ\xca\x41\x16webrisk.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xb6\x01\n\x1a\x63om.google.webrisk.v1beta1B\x0cWebRiskProtoP\x01ZCgoogle.golang.org/genproto/googleapis/cloud/webrisk/v1beta1;webrisk\xa2\x02\x04GCWR\xaa\x02\x1cGoogle.Cloud.WebRisk.V1Beta1\xca\x02\x1cGoogle\\Cloud\\WebRisk\\V1beta1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], ) @@ -69,8 +73,8 @@ ], containing_type=None, 
serialized_options=None, - serialized_start=2293, - serialized_end=2394, + serialized_start=2376, + serialized_end=2477, ) _sym_db.RegisterEnumDescriptor(_THREATTYPE) @@ -97,8 +101,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=2396, - serialized_end=2466, + serialized_start=2479, + serialized_end=2549, ) _sym_db.RegisterEnumDescriptor(_COMPRESSIONTYPE) @@ -134,8 +138,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=981, - serialized_end=1047, + serialized_start=1049, + serialized_end=1115, ) _sym_db.RegisterEnumDescriptor(_COMPUTETHREATLISTDIFFRESPONSE_RESPONSETYPE) @@ -210,8 +214,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=358, - serialized_end=506, + serialized_start=426, + serialized_end=574, ) _COMPUTETHREATLISTDIFFREQUEST = _descriptor.Descriptor( @@ -236,7 +240,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -272,20 +276,20 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], extensions=[], - nested_types=[_COMPUTETHREATLISTDIFFREQUEST_CONSTRAINTS], + nested_types=[_COMPUTETHREATLISTDIFFREQUEST_CONSTRAINTS,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=146, - serialized_end=506, + serialized_start=204, + serialized_end=574, ) @@ -313,7 +317,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -323,8 +327,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=953, - serialized_end=979, + serialized_start=1021, + serialized_end=1047, ) _COMPUTETHREATLISTDIFFRESPONSE = _descriptor.Descriptor( @@ -444,15 +448,15 @@ ), ], extensions=[], - 
nested_types=[_COMPUTETHREATLISTDIFFRESPONSE_CHECKSUM], - enum_types=[_COMPUTETHREATLISTDIFFRESPONSE_RESPONSETYPE], + nested_types=[_COMPUTETHREATLISTDIFFRESPONSE_CHECKSUM,], + enum_types=[_COMPUTETHREATLISTDIFFRESPONSE_RESPONSETYPE,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=509, - serialized_end=1047, + serialized_start=577, + serialized_end=1115, ) @@ -478,7 +482,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -496,7 +500,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -508,8 +512,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1049, - serialized_end=1145, + serialized_start=1117, + serialized_end=1223, ) @@ -565,8 +569,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1246, - serialized_end=1370, + serialized_start=1324, + serialized_end=1448, ) _SEARCHURISRESPONSE = _descriptor.Descriptor( @@ -593,18 +597,18 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], - nested_types=[_SEARCHURISRESPONSE_THREATURI], + nested_types=[_SEARCHURISRESPONSE_THREATURI,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1148, - serialized_end=1370, + serialized_start=1226, + serialized_end=1448, ) @@ -648,7 +652,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -660,8 +664,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1372, - serialized_end=1478, + serialized_start=1450, + serialized_end=1561, ) @@ -735,8 +739,8 @@ syntax="proto3", extension_ranges=[], 
oneofs=[], - serialized_start=1644, - serialized_end=1783, + serialized_start=1727, + serialized_end=1866, ) _SEARCHHASHESRESPONSE = _descriptor.Descriptor( @@ -784,15 +788,15 @@ ), ], extensions=[], - nested_types=[_SEARCHHASHESRESPONSE_THREATHASH], + nested_types=[_SEARCHHASHESRESPONSE_THREATHASH,], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1481, - serialized_end=1783, + serialized_start=1564, + serialized_end=1866, ) @@ -848,8 +852,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1786, - serialized_end=1939, + serialized_start=1869, + serialized_end=2022, ) @@ -905,8 +909,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1942, - serialized_end=2097, + serialized_start=2025, + serialized_end=2180, ) @@ -934,7 +938,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + ), ], extensions=[], nested_types=[], @@ -944,8 +948,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2099, - serialized_end=2128, + serialized_start=2182, + serialized_end=2211, ) @@ -1001,8 +1005,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2130, - serialized_end=2182, + serialized_start=2213, + serialized_end=2265, ) @@ -1094,8 +1098,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2184, - serialized_end=2291, + serialized_start=2267, + serialized_end=2374, ) _COMPUTETHREATLISTDIFFREQUEST_CONSTRAINTS.fields_by_name[ @@ -1208,13 +1212,13 @@ Attributes: threat_type: - Required. The ThreatList to update. + The ThreatList to update. version_token: The current version token of the client for the requested list (the client version that was received from the last successful diff). constraints: - The constraints associated with this request. + Required. The constraints associated with this request. 
""", # @@protoc_insertion_point(class_scope:google.cloud.webrisk.v1beta1.ComputeThreatListDiffRequest) ), @@ -1248,8 +1252,8 @@ __doc__=""" Attributes: response_type: - The type of response. This may indicate that an action is - required by the client when the response is received. + The type of response. This may indicate that an action must be + taken by the client when the response is received. additions: A set of entries to add to a local threat type's list. removals: @@ -1287,7 +1291,7 @@ Attributes: uri: - The URI to be checked for matches. + Required. The URI to be checked for matches. threat_types: Required. The ThreatLists to search in. """, @@ -1533,15 +1537,22 @@ DESCRIPTOR._options = None +_COMPUTETHREATLISTDIFFREQUEST.fields_by_name["threat_type"]._options = None +_COMPUTETHREATLISTDIFFREQUEST.fields_by_name["constraints"]._options = None +_SEARCHURISREQUEST.fields_by_name["uri"]._options = None +_SEARCHURISREQUEST.fields_by_name["threat_types"]._options = None +_SEARCHHASHESREQUEST.fields_by_name["threat_types"]._options = None _WEBRISKSERVICEV1BETA1 = _descriptor.ServiceDescriptor( name="WebRiskServiceV1Beta1", full_name="google.cloud.webrisk.v1beta1.WebRiskServiceV1Beta1", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=2469, - serialized_end=2977, + serialized_options=_b( + "\312A\026webrisk.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=2552, + serialized_end=3222, methods=[ _descriptor.MethodDescriptor( name="ComputeThreatListDiff", @@ -1551,7 +1562,7 @@ input_type=_COMPUTETHREATLISTDIFFREQUEST, output_type=_COMPUTETHREATLISTDIFFRESPONSE, serialized_options=_b( - '\202\323\344\223\002"\022 /v1beta1/threatLists:computeDiff' + '\202\323\344\223\002"\022 /v1beta1/threatLists:computeDiff\332A%threat_type,version_token,constraints' ), ), _descriptor.MethodDescriptor( @@ -1562,7 +1573,7 @@ input_type=_SEARCHURISREQUEST, output_type=_SEARCHURISRESPONSE, serialized_options=_b( - 
"\202\323\344\223\002\026\022\024/v1beta1/uris:search" + "\202\323\344\223\002\026\022\024/v1beta1/uris:search\332A\020uri,threat_types" ), ), _descriptor.MethodDescriptor( @@ -1573,7 +1584,7 @@ input_type=_SEARCHHASHESREQUEST, output_type=_SEARCHHASHESRESPONSE, serialized_options=_b( - "\202\323\344\223\002\030\022\026/v1beta1/hashes:search" + "\202\323\344\223\002\030\022\026/v1beta1/hashes:search\332A\030hash_prefix,threat_types" ), ), ], diff --git a/webrisk/google/cloud/webrisk_v1beta1/types.py b/webrisk/google/cloud/webrisk_v1beta1/types.py index 81d2da5f6dd6..28ce703af790 100644 --- a/webrisk/google/cloud/webrisk_v1beta1/types.py +++ b/webrisk/google/cloud/webrisk_v1beta1/types.py @@ -24,9 +24,13 @@ from google.protobuf import timestamp_pb2 -_shared_modules = [timestamp_pb2] +_shared_modules = [ + timestamp_pb2, +] -_local_modules = [webrisk_pb2] +_local_modules = [ + webrisk_pb2, +] names = [] diff --git a/webrisk/synth.metadata b/webrisk/synth.metadata index 318c21b03427..1477bbdba2f5 100644 --- a/webrisk/synth.metadata +++ b/webrisk/synth.metadata @@ -1,25 +1,26 @@ { - "updateTime": "2019-09-10T12:41:37.939470Z", + "updateTime": "2019-10-29T12:49:55.063315Z", "sources": [ { "generator": { "name": "artman", - "version": "0.36.2", - "dockerImage": "googleapis/artman@sha256:0e6f3a668cd68afc768ecbe08817cf6e56a0e64fcbdb1c58c3b97492d12418a1" + "version": "0.40.3", + "dockerImage": "googleapis/artman@sha256:c805f50525f5f557886c94ab76f56eaa09cb1da58c3ee95111fd34259376621a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "26e189ad03ba63591fb26eecb6aaade7ad39f57a" + "sha": "532773acbed8d09451dafb3d403ab1823e6a6e1e", + "internalRef": "277177415" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/enums.py 
b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/enums.py index 9765b3b06ce8..aef4f2f5dd71 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/enums.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/enums.py @@ -54,6 +54,13 @@ class FindingType(enum.IntEnum): https://www.google.com/about/appsecurity/learning/xss/. CLEAR_TEXT_PASSWORD (int): An application appears to be transmitting a password field in clear text. An attacker can eavesdrop network traffic and sniff the password field. + INVALID_CONTENT_TYPE (int): An application returns sensitive content with an invalid content type, + or without an 'X-Content-Type-Options: nosniff' header. + XSS_ANGULAR_CALLBACK (int): A cross-site scripting (XSS) vulnerability in AngularJS module that + occurs when a user-provided string is interpolated by Angular. + INVALID_HEADER (int): A malformed or invalid valued header. + MISSPELLED_SECURITY_HEADER_NAME (int): Misspelled security header name. + MISMATCHING_SECURITY_HEADER_VALUES (int): Mismatching values in a duplicate security header. 
""" FINDING_TYPE_UNSPECIFIED = 0 @@ -63,6 +70,11 @@ class FindingType(enum.IntEnum): XSS_CALLBACK = 3 XSS_ERROR = 4 CLEAR_TEXT_PASSWORD = 6 + INVALID_CONTENT_TYPE = 7 + XSS_ANGULAR_CALLBACK = 8 + INVALID_HEADER = 9 + MISSPELLED_SECURITY_HEADER_NAME = 10 + MISMATCHING_SECURITY_HEADER_VALUES = 11 class ScanConfig(object): diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/web_security_scanner_client.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/web_security_scanner_client.py index 03e217725ffa..dfc93d7269a1 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/web_security_scanner_client.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/gapic/web_security_scanner_client.py @@ -260,11 +260,9 @@ def create_scan_config( >>> response = client.create_scan_config(parent, scan_config) Args: - parent (str): Required. - The parent resource name where the scan is created, which should be a + parent (str): Required. The parent resource name where the scan is created, which should be a project resource name in the format 'projects/{projectId}'. - scan_config (Union[dict, ~google.cloud.websecurityscanner_v1alpha.types.ScanConfig]): Required. - The ScanConfig to be created. + scan_config (Union[dict, ~google.cloud.websecurityscanner_v1alpha.types.ScanConfig]): Required. The ScanConfig to be created. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.websecurityscanner_v1alpha.types.ScanConfig` @@ -338,8 +336,7 @@ def delete_scan_config( >>> client.delete_scan_config(name) Args: - name (str): Required. - The resource name of the ScanConfig to be deleted. The name follows the + name (str): Required. The resource name of the ScanConfig to be deleted. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will @@ -406,8 +403,7 @@ def get_scan_config( >>> response = client.get_scan_config(name) Args: - name (str): Required. - The resource name of the ScanConfig to be returned. The name follows the + name (str): Required. The resource name of the ScanConfig to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -490,8 +486,7 @@ def list_scan_configs( ... pass Args: - parent (str): Required. - The parent resource name, which should be a project resource name in the + parent (str): Required. The parent resource name, which should be a project resource name in the format 'projects/{projectId}'. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -587,8 +582,7 @@ def update_scan_config( >>> response = client.update_scan_config(scan_config, update_mask) Args: - scan_config (Union[dict, ~google.cloud.websecurityscanner_v1alpha.types.ScanConfig]): Required. - The ScanConfig to be updated. The name field must be set to identify the + scan_config (Union[dict, ~google.cloud.websecurityscanner_v1alpha.types.ScanConfig]): Required. The ScanConfig to be updated. The name field must be set to identify the resource to be updated. The values of fields not covered by the mask will be ignored. @@ -670,8 +664,7 @@ def start_scan_run( >>> response = client.start_scan_run(name) Args: - name (str): Required. - The resource name of the ScanConfig to be used. The name follows the + name (str): Required. The resource name of the ScanConfig to be used. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will @@ -741,8 +734,7 @@ def get_scan_run( >>> response = client.get_scan_run(name) Args: - name (str): Required. - The resource name of the ScanRun to be returned. The name follows the + name (str): Required. The resource name of the ScanRun to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -827,8 +819,7 @@ def list_scan_runs( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan resource name in the + parent (str): Required. The parent resource name, which should be a scan resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}'. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -919,8 +910,7 @@ def stop_scan_run( >>> response = client.stop_scan_run(name) Args: - name (str): Required. - The resource name of the ScanRun to be stopped. The name follows the + name (str): Required. The resource name of the ScanRun to be stopped. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1004,8 +994,7 @@ def list_crawled_urls( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. page_size (int): The maximum number of resources contained in the @@ -1097,8 +1086,7 @@ def get_finding( >>> response = client.get_finding(name) Args: - name (str): Required. - The resource name of the Finding to be returned. The name follows the + name (str): Required. The resource name of the Finding to be returned. 
The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}/findings/{findingId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1186,12 +1174,11 @@ def list_findings( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - filter_ (str): The filter expression. The expression must be in the format: . Supported - field: 'finding\_type'. Supported operator: '='. + filter_ (str): Required. The filter expression. The expression must be in the format: . + Supported field: 'finding\_type'. Supported operator: '='. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -1281,8 +1268,7 @@ def list_finding_type_stats( >>> response = client.list_finding_type_stats(parent) Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto index d4c40eae86dd..c22f4272f167 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,13 +11,12 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "CrawledUrlProto"; @@ -27,16 +26,13 @@ option java_package = "com.google.cloud.websecurityscanner.v1alpha"; // Security Scanner Service crawls the web applications, following all links // within the scope of sites, to find the URLs to test against. message CrawledUrl { - // Output only. - // The http method of the request that was used to visit the URL, in + // Output only. The http method of the request that was used to visit the URL, in // uppercase. string http_method = 1; - // Output only. - // The URL that was crawled. + // Output only. The URL that was crawled. string url = 2; - // Output only. - // The body of the request that was used to visit the URL. + // Output only. The body of the request that was used to visit the URL. 
string body = 3; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url_pb2.py index 8c6f9e9551b9..046134e81eb5 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/crawled_url_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto", package="google.cloud.websecurityscanner.v1alpha", @@ -26,9 +23,8 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\017CrawledUrlProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - "\n?google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto\x12'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\"<\n\nCrawledUrl\x12\x13\n\x0bhttp_method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\tB\x9b\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0f\x43rawledUrlProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3" + "\n?google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto\x12'google.cloud.websecurityscanner.v1alpha\"<\n\nCrawledUrl\x12\x13\n\x0bhttp_method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\tB\x9b\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0f\x43rawledUrlProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3" ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -102,8 +98,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=138, - 
serialized_end=198, + serialized_start=108, + serialized_end=168, ) DESCRIPTOR.message_types_by_name["CrawledUrl"] = _CRAWLEDURL diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding.proto index 07140e276510..c02020f34319 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,13 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; +import "google/api/resource.proto"; import "google/cloud/websecurityscanner/v1alpha/finding_addon.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; @@ -27,6 +28,11 @@ option java_package = "com.google.cloud.websecurityscanner.v1alpha"; // A Finding resource represents a vulnerability instance identified during a // ScanRun. message Finding { + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/Finding" + pattern: "projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}" + }; + // Types of Findings. enum FindingType { // The invalid finding type. @@ -71,70 +77,76 @@ message Finding { // An application appears to be transmitting a password field in clear text. // An attacker can eavesdrop network traffic and sniff the password field. 
CLEAR_TEXT_PASSWORD = 6; + + // An application returns sensitive content with an invalid content type, + // or without an 'X-Content-Type-Options: nosniff' header. + INVALID_CONTENT_TYPE = 7; + + // A cross-site scripting (XSS) vulnerability in AngularJS module that + // occurs when a user-provided string is interpolated by Angular. + XSS_ANGULAR_CALLBACK = 8; + + // A malformed or invalid valued header. + INVALID_HEADER = 9; + + // Misspelled security header name. + MISSPELLED_SECURITY_HEADER_NAME = 10; + + // Mismatching values in a duplicate security header. + MISMATCHING_SECURITY_HEADER_VALUES = 11; } - // Output only. // The resource name of the Finding. The name follows the format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{scanRunId}/findings/{findingId}'. // The finding IDs are generated by the system. string name = 1; - // Output only. // The type of the Finding. FindingType finding_type = 2; - // Output only. // The http method of the request that triggered the vulnerability, in // uppercase. string http_method = 3; - // Output only. // The URL produced by the server-side fuzzer and used in the request that // triggered the vulnerability. string fuzzed_url = 4; - // Output only. // The body of the request that triggered the vulnerability. string body = 5; - // Output only. // The description of the vulnerability. string description = 6; - // Output only. // The URL containing human-readable payload that user can leverage to // reproduce the vulnerability. string reproduction_url = 7; - // Output only. // If the vulnerability was originated from nested IFrame, the immediate // parent IFrame is reported. string frame_url = 8; - // Output only. // The URL where the browser lands when the vulnerability is detected. string final_url = 9; - // Output only. // The tracking ID uniquely identifies a vulnerability instance across // multiple ScanRuns. string tracking_id = 10; - // Output only. 
// An addon containing information about outdated libraries. OutdatedLibrary outdated_library = 11; - // Output only. // An addon containing detailed information regarding any resource causing the // vulnerability such as JavaScript sources, image, audio files, etc. ViolatingResource violating_resource = 12; - // Output only. + // An addon containing information about vulnerable or missing HTTP headers. + VulnerableHeaders vulnerable_headers = 15; + // An addon containing information about request parameters which were found // to be vulnerable. VulnerableParameters vulnerable_parameters = 13; - // Output only. // An addon containing information reported for an XSS, if any. Xss xss = 14; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto index a7d344b84bc6..3fafdb40206a 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,13 +11,12 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "FindingAddonProto"; @@ -51,6 +50,24 @@ message VulnerableParameters { repeated string parameter_names = 1; } +// Information about vulnerable or missing HTTP Headers. 
+message VulnerableHeaders { + // Describes a HTTP Header. + message Header { + // Header name. + string name = 1; + + // Header value. + string value = 2; + } + + // List of vulnerable headers. + repeated Header headers = 1; + + // List of missing headers. + repeated Header missing_headers = 2; +} + // Information reported for an XSS. message Xss { // Stack traces leading to the point where the XSS occurred. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon_pb2.py index fa01f56ca8a0..d0c9962adf2a 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_addon_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto", package="google.cloud.websecurityscanner.v1alpha", @@ -26,9 +23,8 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\021FindingAddonProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - '\nAgoogle/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto"Q\n\x0fOutdatedLibrary\x12\x14\n\x0clibrary_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x17\n\x0flearn_more_urls\x18\x03 \x03(\t"?\n\x11ViolatingResource\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x14\n\x0cresource_url\x18\x02 \x01(\t"/\n\x14VulnerableParameters\x12\x17\n\x0fparameter_names\x18\x01 \x03(\t"2\n\x03Xss\x12\x14\n\x0cstack_traces\x18\x01 \x03(\t\x12\x15\n\rerror_message\x18\x02 
\x01(\tB\x9d\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x11\x46indingAddonProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' + '\nAgoogle/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto\x12\'google.cloud.websecurityscanner.v1alpha"Q\n\x0fOutdatedLibrary\x12\x14\n\x0clibrary_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x17\n\x0flearn_more_urls\x18\x03 \x03(\t"?\n\x11ViolatingResource\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x14\n\x0cresource_url\x18\x02 \x01(\t"/\n\x14VulnerableParameters\x12\x17\n\x0fparameter_names\x18\x01 \x03(\t"\xea\x01\n\x11VulnerableHeaders\x12R\n\x07headers\x18\x01 \x03(\x0b\x32\x41.google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header\x12Z\n\x0fmissing_headers\x18\x02 \x03(\x0b\x32\x41.google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header\x1a%\n\x06Header\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"2\n\x03Xss\x12\x14\n\x0cstack_traces\x18\x01 \x03(\t\x12\x15\n\rerror_message\x18\x02 \x01(\tB\x9d\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x11\x46indingAddonProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -102,8 +98,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=140, - serialized_end=221, + serialized_start=110, + serialized_end=191, ) @@ -159,8 +155,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=223, - serialized_end=286, + serialized_start=193, + serialized_end=256, ) @@ -198,8 +194,121 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=288, - serialized_end=335, + serialized_start=258, + serialized_end=305, +) + + +_VULNERABLEHEADERS_HEADER = _descriptor.Descriptor( + name="Header", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header", + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=505, + serialized_end=542, +) + +_VULNERABLEHEADERS = _descriptor.Descriptor( + name="VulnerableHeaders", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="headers", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.headers", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="missing_headers", + full_name="google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.missing_headers", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_VULNERABLEHEADERS_HEADER], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=308, + serialized_end=542, ) @@ -255,13 +364,19 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=337, - serialized_end=387, + serialized_start=544, + serialized_end=594, ) +_VULNERABLEHEADERS_HEADER.containing_type = _VULNERABLEHEADERS +_VULNERABLEHEADERS.fields_by_name["headers"].message_type = _VULNERABLEHEADERS_HEADER +_VULNERABLEHEADERS.fields_by_name[ + "missing_headers" +].message_type = _VULNERABLEHEADERS_HEADER DESCRIPTOR.message_types_by_name["OutdatedLibrary"] = _OUTDATEDLIBRARY DESCRIPTOR.message_types_by_name["ViolatingResource"] = _VIOLATINGRESOURCE DESCRIPTOR.message_types_by_name["VulnerableParameters"] = _VULNERABLEPARAMETERS +DESCRIPTOR.message_types_by_name["VulnerableHeaders"] = _VULNERABLEHEADERS DESCRIPTOR.message_types_by_name["Xss"] = _XSS _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -327,6 +442,45 @@ ) _sym_db.RegisterMessage(VulnerableParameters) +VulnerableHeaders = _reflection.GeneratedProtocolMessageType( + "VulnerableHeaders", + (_message.Message,), + dict( + Header=_reflection.GeneratedProtocolMessageType( + "Header", + (_message.Message,), + dict( + DESCRIPTOR=_VULNERABLEHEADERS_HEADER, + __module__="google.cloud.websecurityscanner_v1alpha.proto.finding_addon_pb2", + __doc__="""Describes a HTTP Header. + + + Attributes: + name: + Header name. + value: + Header value. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1alpha.VulnerableHeaders.Header) + ), + ), + DESCRIPTOR=_VULNERABLEHEADERS, + __module__="google.cloud.websecurityscanner_v1alpha.proto.finding_addon_pb2", + __doc__="""Information about vulnerable or missing HTTP Headers. + + + Attributes: + headers: + List of vulnerable headers. + missing_headers: + List of missing headers. + """, + # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1alpha.VulnerableHeaders) + ), +) +_sym_db.RegisterMessage(VulnerableHeaders) +_sym_db.RegisterMessage(VulnerableHeaders.Header) + Xss = _reflection.GeneratedProtocolMessageType( "Xss", (_message.Message,), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_pb2.py index da3678c40687..4eb9f1c4f84e 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1alpha.proto import ( finding_addon_pb2 as google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__addon__pb2, ) @@ -29,10 +29,10 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\014FindingProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - '\n;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a\x41google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto"\xf5\x05\n\x07\x46inding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12R\n\x0c\x66inding_type\x18\x02 
\x01(\x0e\x32<.google.cloud.websecurityscanner.v1alpha.Finding.FindingType\x12\x13\n\x0bhttp_method\x18\x03 \x01(\t\x12\x12\n\nfuzzed_url\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x18\n\x10reproduction_url\x18\x07 \x01(\t\x12\x11\n\tframe_url\x18\x08 \x01(\t\x12\x11\n\tfinal_url\x18\t \x01(\t\x12\x13\n\x0btracking_id\x18\n \x01(\t\x12R\n\x10outdated_library\x18\x0b \x01(\x0b\x32\x38.google.cloud.websecurityscanner.v1alpha.OutdatedLibrary\x12V\n\x12violating_resource\x18\x0c \x01(\x0b\x32:.google.cloud.websecurityscanner.v1alpha.ViolatingResource\x12\\\n\x15vulnerable_parameters\x18\r \x01(\x0b\x32=.google.cloud.websecurityscanner.v1alpha.VulnerableParameters\x12\x39\n\x03xss\x18\x0e \x01(\x0b\x32,.google.cloud.websecurityscanner.v1alpha.Xss"\xa1\x01\n\x0b\x46indingType\x12\x1c\n\x18\x46INDING_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rMIXED_CONTENT\x10\x01\x12\x14\n\x10OUTDATED_LIBRARY\x10\x02\x12\x11\n\rROSETTA_FLASH\x10\x05\x12\x10\n\x0cXSS_CALLBACK\x10\x03\x12\r\n\tXSS_ERROR\x10\x04\x12\x17\n\x13\x43LEAR_TEXT_PASSWORD\x10\x06\x42\x98\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0c\x46indingProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' + '\n;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x19google/api/resource.proto\x1a\x41google/cloud/websecurityscanner_v1alpha/proto/finding_addon.proto"\xe9\x08\n\x07\x46inding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12R\n\x0c\x66inding_type\x18\x02 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1alpha.Finding.FindingType\x12\x13\n\x0bhttp_method\x18\x03 \x01(\t\x12\x12\n\nfuzzed_url\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x18\n\x10reproduction_url\x18\x07 \x01(\t\x12\x11\n\tframe_url\x18\x08 \x01(\t\x12\x11\n\tfinal_url\x18\t \x01(\t\x12\x13\n\x0btracking_id\x18\n 
\x01(\t\x12R\n\x10outdated_library\x18\x0b \x01(\x0b\x32\x38.google.cloud.websecurityscanner.v1alpha.OutdatedLibrary\x12V\n\x12violating_resource\x18\x0c \x01(\x0b\x32:.google.cloud.websecurityscanner.v1alpha.ViolatingResource\x12V\n\x12vulnerable_headers\x18\x0f \x01(\x0b\x32:.google.cloud.websecurityscanner.v1alpha.VulnerableHeaders\x12\\\n\x15vulnerable_parameters\x18\r \x01(\x0b\x32=.google.cloud.websecurityscanner.v1alpha.VulnerableParameters\x12\x39\n\x03xss\x18\x0e \x01(\x0b\x32,.google.cloud.websecurityscanner.v1alpha.Xss"\xb6\x02\n\x0b\x46indingType\x12\x1c\n\x18\x46INDING_TYPE_UNSPECIFIED\x10\x00\x12\x11\n\rMIXED_CONTENT\x10\x01\x12\x14\n\x10OUTDATED_LIBRARY\x10\x02\x12\x11\n\rROSETTA_FLASH\x10\x05\x12\x10\n\x0cXSS_CALLBACK\x10\x03\x12\r\n\tXSS_ERROR\x10\x04\x12\x17\n\x13\x43LEAR_TEXT_PASSWORD\x10\x06\x12\x18\n\x14INVALID_CONTENT_TYPE\x10\x07\x12\x18\n\x14XSS_ANGULAR_CALLBACK\x10\x08\x12\x12\n\x0eINVALID_HEADER\x10\t\x12#\n\x1fMISSPELLED_SECURITY_HEADER_NAME\x10\n\x12&\n"MISMATCHING_SECURITY_HEADER_VALUES\x10\x0b:\x84\x01\xea\x41\x80\x01\n)websecurityscanner.googleapis.com/Finding\x12Sprojects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}B\x98\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0c\x46indingProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__addon__pb2.DESCRIPTOR, ], ) @@ -77,11 +77,42 @@ serialized_options=None, type=None, ), + _descriptor.EnumValueDescriptor( + name="INVALID_CONTENT_TYPE", + index=7, + number=7, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="XSS_ANGULAR_CALLBACK", + index=8, + number=8, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="INVALID_HEADER", index=9, number=9, 
serialized_options=None, type=None + ), + _descriptor.EnumValueDescriptor( + name="MISSPELLED_SECURITY_HEADER_NAME", + index=10, + number=10, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="MISMATCHING_SECURITY_HEADER_VALUES", + index=11, + number=11, + serialized_options=None, + type=None, + ), ], containing_type=None, serialized_options=None, - serialized_start=798, - serialized_end=959, + serialized_start=883, + serialized_end=1193, ) _sym_db.RegisterEnumDescriptor(_FINDING_FINDINGTYPE) @@ -309,10 +340,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="vulnerable_headers", + full_name="google.cloud.websecurityscanner.v1alpha.Finding.vulnerable_headers", + index=12, + number=15, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="vulnerable_parameters", full_name="google.cloud.websecurityscanner.v1alpha.Finding.vulnerable_parameters", - index=12, + index=13, number=13, type=11, cpp_type=10, @@ -330,7 +379,7 @@ _descriptor.FieldDescriptor( name="xss", full_name="google.cloud.websecurityscanner.v1alpha.Finding.xss", - index=13, + index=14, number=14, type=11, cpp_type=10, @@ -349,13 +398,15 @@ extensions=[], nested_types=[], enum_types=[_FINDING_FINDINGTYPE], - serialized_options=None, + serialized_options=_b( + "\352A\200\001\n)websecurityscanner.googleapis.com/Finding\022Sprojects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=202, - serialized_end=959, + serialized_start=199, + serialized_end=1328, ) _FINDING.fields_by_name["finding_type"].enum_type = _FINDING_FINDINGTYPE @@ -369,6 +420,11 @@ ].message_type = ( 
google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__addon__pb2._VIOLATINGRESOURCE ) +_FINDING.fields_by_name[ + "vulnerable_headers" +].message_type = ( + google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__addon__pb2._VULNERABLEHEADERS +) _FINDING.fields_by_name[ "vulnerable_parameters" ].message_type = ( @@ -395,48 +451,48 @@ Attributes: name: - Output only. The resource name of the Finding. The name - follows the format of 'projects/{projectId}/scanConfigs/{scanC - onfigId}/scanruns/{scanRunId}/findings/{findingId}'. The - finding IDs are generated by the system. + The resource name of the Finding. The name follows the format + of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{ + scanRunId}/findings/{findingId}'. The finding IDs are + generated by the system. finding_type: - Output only. The type of the Finding. + The type of the Finding. http_method: - Output only. The http method of the request that triggered the + The http method of the request that triggered the vulnerability, in uppercase. fuzzed_url: - Output only. The URL produced by the server-side fuzzer and - used in the request that triggered the vulnerability. + The URL produced by the server-side fuzzer and used in the + request that triggered the vulnerability. body: - Output only. The body of the request that triggered the - vulnerability. + The body of the request that triggered the vulnerability. description: - Output only. The description of the vulnerability. + The description of the vulnerability. reproduction_url: - Output only. The URL containing human-readable payload that - user can leverage to reproduce the vulnerability. + The URL containing human-readable payload that user can + leverage to reproduce the vulnerability. frame_url: - Output only. If the vulnerability was originated from nested - IFrame, the immediate parent IFrame is reported. 
+ If the vulnerability was originated from nested IFrame, the + immediate parent IFrame is reported. final_url: - Output only. The URL where the browser lands when the - vulnerability is detected. + The URL where the browser lands when the vulnerability is + detected. tracking_id: - Output only. The tracking ID uniquely identifies a - vulnerability instance across multiple ScanRuns. + The tracking ID uniquely identifies a vulnerability instance + across multiple ScanRuns. outdated_library: - Output only. An addon containing information about outdated - libraries. + An addon containing information about outdated libraries. violating_resource: - Output only. An addon containing detailed information - regarding any resource causing the vulnerability such as - JavaScript sources, image, audio files, etc. + An addon containing detailed information regarding any + resource causing the vulnerability such as JavaScript sources, + image, audio files, etc. + vulnerable_headers: + An addon containing information about vulnerable or missing + HTTP headers. vulnerable_parameters: - Output only. An addon containing information about request - parameters which were found to be vulnerable. + An addon containing information about request parameters which + were found to be vulnerable. xss: - Output only. An addon containing information reported for an - XSS, if any. + An addon containing information reported for an XSS, if any. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1alpha.Finding) ), @@ -445,4 +501,5 @@ DESCRIPTOR._options = None +_FINDING._options = None # @@protoc_insertion_point(module_scope) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto index 73115667f418..e46b330c627e 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,12 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; import "google/cloud/websecurityscanner/v1alpha/finding.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; @@ -27,11 +27,9 @@ option java_package = "com.google.cloud.websecurityscanner.v1alpha"; // A FindingTypeStats resource represents stats regarding a specific FindingType // of Findings under a given ScanRun. message FindingTypeStats { - // Output only. // The finding type associated with the stats. Finding.FindingType finding_type = 1; - // Output only. // The count of findings belonging to this finding type. 
int32 finding_count = 2; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats_pb2.py index a3e68d8e4184..94558c069bb9 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats_pb2.py @@ -15,7 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.cloud.websecurityscanner_v1alpha.proto import ( finding_pb2 as google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2, ) @@ -29,11 +28,10 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\025FindingTypeStatsProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - "\nFgoogle/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto\x12'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\"}\n\x10\x46indingTypeStats\x12R\n\x0c\x66inding_type\x18\x01 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1alpha.Finding.FindingType\x12\x15\n\rfinding_count\x18\x02 \x01(\x05\x42\xa1\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x15\x46indingTypeStatsProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3" + "\nFgoogle/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto\x12'google.cloud.websecurityscanner.v1alpha\x1a;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\"}\n\x10\x46indingTypeStats\x12R\n\x0c\x66inding_type\x18\x01 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1alpha.Finding.FindingType\x12\x15\n\rfinding_count\x18\x02 
\x01(\x05\x42\xa1\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x15\x46indingTypeStatsProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3" ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2.DESCRIPTOR, + google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2.DESCRIPTOR ], ) @@ -90,8 +88,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=206, - serialized_end=331, + serialized_start=176, + serialized_end=301, ) _FINDINGTYPESTATS.fields_by_name[ @@ -114,10 +112,9 @@ Attributes: finding_type: - Output only. The finding type associated with the stats. + The finding type associated with the stats. finding_count: - Output only. The count of findings belonging to this finding - type. + The count of findings belonging to this finding type. """, # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1alpha.FindingTypeStats) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto index 144f7f2b5e4c..5497b1a0f6ac 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google Inc. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,12 +11,15 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
+// syntax = "proto3"; package google.cloud.websecurityscanner.v1alpha; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/websecurityscanner/v1alpha/scan_run.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner"; @@ -25,37 +28,42 @@ option java_outer_classname = "ScanConfigProto"; option java_package = "com.google.cloud.websecurityscanner.v1alpha"; // A ScanConfig resource contains the configurations to launch a scan. +// next id: 12 message ScanConfig { + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + pattern: "projects/{project}/scanConfigs/{scan_config}" + }; + // Scan authentication configuration. message Authentication { // Describes authentication configuration that uses a Google account. message GoogleAccount { - // Required. - // The user name of the Google account. - string username = 1; - - // Input only. - // Required. - // The password of the Google account. The credential is stored encrypted - // and not returned in any response. - string password = 2; + // Required. The user name of the Google account. + string username = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Input only. The password of the Google account. The credential is stored encrypted + // and not returned in any response nor included in audit logs. + string password = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = INPUT_ONLY + ]; } // Describes authentication configuration that uses a custom account. message CustomAccount { - // Required. - // The user name of the custom account. - string username = 1; - - // Input only. - // Required. - // The password of the custom account. The credential is stored encrypted - // and not returned in any response. - string password = 2; - - // Required. 
- // The login form URL of the website. - string login_url = 3; + // Required. The user name of the custom account. + string username = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Input only. The password of the custom account. The credential is stored encrypted + // and not returned in any response nor included in audit logs. + string password = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = INPUT_ONLY + ]; + + // Required. The login form URL of the website. + string login_url = 3 [(google.api.field_behavior) = REQUIRED]; } // Required. @@ -77,9 +85,8 @@ message ScanConfig { // immediately. google.protobuf.Timestamp schedule_time = 1; - // Required. - // The duration of time between executions in days. - int32 interval_duration_days = 2; + // Required. The duration of time between executions in days. + int32 interval_duration_days = 2 [(google.api.field_behavior) = REQUIRED]; } // Type of user agents used for scanning. @@ -115,9 +122,8 @@ message ScanConfig { // generated by the system. string name = 1; - // Required. - // The user provided display name of the ScanConfig. - string display_name = 2; + // Required. The user provided display name of the ScanConfig. + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; // The maximum QPS during scanning. A valid value ranges from 5 to 20 // inclusively. If the field is unspecified or its value is set 0, server will @@ -125,9 +131,8 @@ message ScanConfig { // INVALID_ARGUMENT error. int32 max_qps = 3; - // Required. - // The starting URLs from which the scanner finds site pages. - repeated string starting_urls = 4; + // Required. The starting URLs from which the scanner finds site pages. + repeated string starting_urls = 4 [(google.api.field_behavior) = REQUIRED]; // The authentication configuration. If specified, service will use the // authentication configuration during scanning. 
@@ -146,4 +151,7 @@ message ScanConfig { // Set of Cloud Platforms targeted by the scan. If empty, APP_ENGINE will be // used as a default. repeated TargetPlatform target_platforms = 9; + + // Latest ScanRun if available. + ScanRun latest_run = 11; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config_pb2.py index 71de8f3294cf..c85206f135ad 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/scan_config_pb2.py @@ -15,7 +15,11 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.cloud.websecurityscanner_v1alpha.proto import ( + scan_run_pb2 as google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__run__pb2, +) from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 @@ -27,10 +31,12 @@ "\n+com.google.cloud.websecurityscanner.v1alphaB\017ScanConfigProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - '\n?google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xdc\x08\n\nScanConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x0f\n\x07max_qps\x18\x03 \x01(\x05\x12\x15\n\rstarting_urls\x18\x04 \x03(\t\x12Z\n\x0e\x61uthentication\x18\x05 \x01(\x0b\x32\x42.google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication\x12Q\n\nuser_agent\x18\x06 
\x01(\x0e\x32=.google.cloud.websecurityscanner.v1alpha.ScanConfig.UserAgent\x12\x1a\n\x12\x62lacklist_patterns\x18\x07 \x03(\t\x12N\n\x08schedule\x18\x08 \x01(\x0b\x32<.google.cloud.websecurityscanner.v1alpha.ScanConfig.Schedule\x12\\\n\x10target_platforms\x18\t \x03(\x0e\x32\x42.google.cloud.websecurityscanner.v1alpha.ScanConfig.TargetPlatform\x1a\xf7\x02\n\x0e\x41uthentication\x12j\n\x0egoogle_account\x18\x01 \x01(\x0b\x32P.google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication.GoogleAccountH\x00\x12j\n\x0e\x63ustom_account\x18\x02 \x01(\x0b\x32P.google.cloud.websecurityscanner.v1alpha.ScanConfig.Authentication.CustomAccountH\x00\x1a\x33\n\rGoogleAccount\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x1a\x46\n\rCustomAccount\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x11\n\tlogin_url\x18\x03 \x01(\tB\x10\n\x0e\x61uthentication\x1a]\n\x08Schedule\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1e\n\x16interval_duration_days\x18\x02 \x01(\x05"`\n\tUserAgent\x12\x1a\n\x16USER_AGENT_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x43HROME_LINUX\x10\x01\x12\x12\n\x0e\x43HROME_ANDROID\x10\x02\x12\x11\n\rSAFARI_IPHONE\x10\x03"N\n\x0eTargetPlatform\x12\x1f\n\x1bTARGET_PLATFORM_UNSPECIFIED\x10\x00\x12\x0e\n\nAPP_ENGINE\x10\x01\x12\x0b\n\x07\x43OMPUTE\x10\x02\x42\x9b\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x0fScanConfigProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' + '\n?google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; + + // Required. The filter expression. The expression must be in the format: // . 
// Supported field: 'finding_type'. // Supported operator: '='. - string filter = 2; + string filter = 2 [(google.api.field_behavior) = REQUIRED]; // A token identifying a page of results to be returned. This should be a // `next_page_token` value returned from a previous List request. @@ -335,11 +392,15 @@ message ListFindingsResponse { // Request for the `ListFindingTypeStats` method. message ListFindingTypeStatsRequest { - // Required. - // The parent resource name, which should be a scan run resource name in the + // Required. The parent resource name, which should be a scan run resource name in the // format // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; } // Response for the `ListFindingTypeStats` method. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/web_security_scanner_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/web_security_scanner_pb2.py index 6cdfadd707ce..dd3772510a9e 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/web_security_scanner_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/proto/web_security_scanner_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1alpha.proto import ( crawled_url_pb2 as google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_crawled__url__pb2, ) @@ -43,10 +46,13 @@ 
"\n+com.google.cloud.websecurityscanner.v1alphaB\027WebSecurityScannerProtoP\001ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscanner" ), serialized_pb=_b( - '\nHgoogle/cloud/websecurityscanner_v1alpha/proto/web_security_scanner.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a?google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto\x1a;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\x1a\x46google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto\x1a?google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto\x1a/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\x12\xd3\x01\n\x0cListFindings\x12<.google.cloud.websecurityscanner.v1alpha.ListFindingsRequest\x1a=.google.cloud.websecurityscanner.v1alpha.ListFindingsResponse"F\x82\xd3\xe4\x93\x02@\x12>/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\x12\xf3\x01\n\x14ListFindingTypeStats\x12\x44.google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsRequest\x1a\x45.google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsResponse"N\x82\xd3\xe4\x93\x02H\x12\x46/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStatsB\xa3\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x17WebSecurityScannerProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' + 
'\nHgoogle/cloud/websecurityscanner_v1alpha/proto/web_security_scanner.proto\x12\'google.cloud.websecurityscanner.v1alpha\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a?google/cloud/websecurityscanner_v1alpha/proto/crawled_url.proto\x1a;google/cloud/websecurityscanner_v1alpha/proto/finding.proto\x1a\x46google/cloud/websecurityscanner_v1alpha/proto/finding_type_stats.proto\x1a?google/cloud/websecurityscanner_v1alpha/proto/scan_config.proto\x1a/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\xda\x41\x04name\x12\xe3\x01\n\x0cListFindings\x12<.google.cloud.websecurityscanner.v1alpha.ListFindingsRequest\x1a=.google.cloud.websecurityscanner.v1alpha.ListFindingsResponse"V\x82\xd3\xe4\x93\x02@\x12>/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\xda\x41\rparent,filter\x12\xfc\x01\n\x14ListFindingTypeStats\x12\x44.google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsRequest\x1a\x45.google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsResponse"W\x82\xd3\xe4\x93\x02H\x12\x46/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats\xda\x41\x06parent\x1aU\xca\x41!websecurityscanner.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xa3\x01\n+com.google.cloud.websecurityscanner.v1alphaB\x17WebSecurityScannerProtoP\x01ZYgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha;websecurityscannerb\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_crawled__url__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__type__stats__pb2.DESCRIPTOR, @@ 
-80,7 +86,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -98,7 +106,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -110,8 +118,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=535, - serialized_end=650, + serialized_start=621, + serialized_end=794, ) @@ -137,7 +145,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -149,8 +159,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=652, - serialized_end=691, + serialized_start=796, + serialized_end=889, ) @@ -176,7 +186,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -188,8 +200,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=693, - serialized_end=729, + serialized_start=891, + serialized_end=981, ) @@ -215,7 +227,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -263,8 +277,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=731, - serialized_end=810, + serialized_start=984, + serialized_end=1116, ) @@ -290,7 +304,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -308,7 
+322,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -320,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=813, - serialized_end=961, + serialized_start=1119, + serialized_end=1277, ) @@ -377,8 +391,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=963, - serialized_end=1088, + serialized_start=1279, + serialized_end=1404, ) @@ -404,7 +418,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -416,8 +432,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1090, - serialized_end=1125, + serialized_start=1406, + serialized_end=1495, ) @@ -443,7 +459,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -455,8 +473,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1127, - serialized_end=1160, + serialized_start=1497, + serialized_end=1581, ) @@ -482,7 +500,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -530,8 +550,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1162, - serialized_end=1238, + serialized_start=1584, + serialized_end=1714, ) @@ -587,8 +607,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1240, - serialized_end=1356, + serialized_start=1716, + serialized_end=1832, ) @@ -614,7 +634,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -626,8 +648,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1358, - serialized_end=1392, + serialized_start=1834, + serialized_end=1919, ) @@ -653,7 +675,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -701,8 +725,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1394, - serialized_end=1473, + serialized_start=1922, + serialized_end=2052, ) @@ -758,8 +782,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1475, - serialized_end=1600, + serialized_start=2054, + serialized_end=2179, ) @@ -785,7 +809,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/Finding" + ), file=DESCRIPTOR, ) ], @@ -797,8 +823,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1602, - serialized_end=1635, + serialized_start=2181, + serialized_end=2265, ) @@ -824,7 +850,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -842,7 +870,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -890,8 +918,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1637, - serialized_end=1729, + serialized_start=2268, + serialized_end=2416, ) @@ -947,8 +975,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1731, - serialized_end=1846, 
+ serialized_start=2418, + serialized_end=2533, ) @@ -974,7 +1002,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -986,8 +1016,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1848, - serialized_end=1893, + serialized_start=2535, + serialized_end=2631, ) @@ -1025,8 +1055,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1895, - serialized_end=2012, + serialized_start=2633, + serialized_end=2750, ) _CREATESCANCONFIGREQUEST.fields_by_name[ @@ -1422,8 +1452,9 @@ resource name in the format 'projects/{projectId}/scanConfigs/ {scanConfigId}/scanRuns/{scanRunId}'. filter: - The filter expression. The expression must be in the format: . - Supported field: 'finding\_type'. Supported operator: '='. + Required. The filter expression. The expression must be in the + format: . Supported field: 'finding\_type'. Supported + operator: '='. page_token: A token identifying a page of results to be returned. 
This should be a ``next_page_token`` value returned from a previous @@ -1500,15 +1531,33 @@ DESCRIPTOR._options = None +_CREATESCANCONFIGREQUEST.fields_by_name["parent"]._options = None +_CREATESCANCONFIGREQUEST.fields_by_name["scan_config"]._options = None +_DELETESCANCONFIGREQUEST.fields_by_name["name"]._options = None +_GETSCANCONFIGREQUEST.fields_by_name["name"]._options = None +_LISTSCANCONFIGSREQUEST.fields_by_name["parent"]._options = None +_UPDATESCANCONFIGREQUEST.fields_by_name["scan_config"]._options = None +_UPDATESCANCONFIGREQUEST.fields_by_name["update_mask"]._options = None +_STARTSCANRUNREQUEST.fields_by_name["name"]._options = None +_GETSCANRUNREQUEST.fields_by_name["name"]._options = None +_LISTSCANRUNSREQUEST.fields_by_name["parent"]._options = None +_STOPSCANRUNREQUEST.fields_by_name["name"]._options = None +_LISTCRAWLEDURLSREQUEST.fields_by_name["parent"]._options = None +_GETFINDINGREQUEST.fields_by_name["name"]._options = None +_LISTFINDINGSREQUEST.fields_by_name["parent"]._options = None +_LISTFINDINGSREQUEST.fields_by_name["filter"]._options = None +_LISTFINDINGTYPESTATSREQUEST.fields_by_name["parent"]._options = None _WEBSECURITYSCANNER = _descriptor.ServiceDescriptor( name="WebSecurityScanner", full_name="google.cloud.websecurityscanner.v1alpha.WebSecurityScanner", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=2015, - serialized_end=4655, + serialized_options=_b( + "\312A!websecurityscanner.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=2753, + serialized_end=5621, methods=[ _descriptor.MethodDescriptor( name="CreateScanConfig", @@ -1518,7 +1567,7 @@ input_type=_CREATESCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - '\202\323\344\223\0027"(/v1alpha/{parent=projects/*}/scanConfigs:\013scan_config' + 
'\202\323\344\223\0027"(/v1alpha/{parent=projects/*}/scanConfigs:\013scan_config\332A\022parent,scan_config' ), ), _descriptor.MethodDescriptor( @@ -1529,7 +1578,7 @@ input_type=_DELETESCANCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002**(/v1alpha/{name=projects/*/scanConfigs/*}" + "\202\323\344\223\002**(/v1alpha/{name=projects/*/scanConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1540,7 +1589,7 @@ input_type=_GETSCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\002*\022(/v1alpha/{name=projects/*/scanConfigs/*}" + "\202\323\344\223\002*\022(/v1alpha/{name=projects/*/scanConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1551,7 +1600,7 @@ input_type=_LISTSCANCONFIGSREQUEST, output_type=_LISTSCANCONFIGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002*\022(/v1alpha/{parent=projects/*}/scanConfigs" + "\202\323\344\223\002*\022(/v1alpha/{parent=projects/*}/scanConfigs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1562,7 +1611,7 @@ input_type=_UPDATESCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\002C24/v1alpha/{scan_config.name=projects/*/scanConfigs/*}:\013scan_config" + "\202\323\344\223\002C24/v1alpha/{scan_config.name=projects/*/scanConfigs/*}:\013scan_config\332A\027scan_config,update_mask" ), ), _descriptor.MethodDescriptor( @@ -1573,7 +1622,7 @@ input_type=_STARTSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - '\202\323\344\223\0023"./v1alpha/{name=projects/*/scanConfigs/*}:start:\001*' + '\202\323\344\223\0023"./v1alpha/{name=projects/*/scanConfigs/*}:start:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ 
-1584,7 +1633,7 @@ input_type=_GETSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - "\202\323\344\223\0025\0223/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*}" + "\202\323\344\223\0025\0223/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1595,7 +1644,7 @@ input_type=_LISTSCANRUNSREQUEST, output_type=_LISTSCANRUNSRESPONSE, serialized_options=_b( - "\202\323\344\223\0025\0223/v1alpha/{parent=projects/*/scanConfigs/*}/scanRuns" + "\202\323\344\223\0025\0223/v1alpha/{parent=projects/*/scanConfigs/*}/scanRuns\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1606,7 +1655,7 @@ input_type=_STOPSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - '\202\323\344\223\002="8/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\001*' + '\202\323\344\223\002="8/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -1617,7 +1666,7 @@ input_type=_LISTCRAWLEDURLSREQUEST, output_type=_LISTCRAWLEDURLSRESPONSE, serialized_options=_b( - "\202\323\344\223\002C\022A/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls" + "\202\323\344\223\002C\022A/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1628,7 +1677,7 @@ input_type=_GETFINDINGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1alpha_dot_proto_dot_finding__pb2._FINDING, serialized_options=_b( - "\202\323\344\223\002@\022>/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}" + "\202\323\344\223\002@\022>/v1alpha/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1639,7 +1688,7 @@ input_type=_LISTFINDINGSREQUEST, output_type=_LISTFINDINGSRESPONSE, 
serialized_options=_b( - "\202\323\344\223\002@\022>/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings" + "\202\323\344\223\002@\022>/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\332A\rparent,filter" ), ), _descriptor.MethodDescriptor( @@ -1650,7 +1699,7 @@ input_type=_LISTFINDINGTYPESTATSREQUEST, output_type=_LISTFINDINGTYPESTATSRESPONSE, serialized_options=_b( - "\202\323\344\223\002H\022F/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats" + "\202\323\344\223\002H\022F/v1alpha/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats\332A\006parent" ), ), ], diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/enums.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/enums.py index 38dde89453e2..244c5e058ef8 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/enums.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/enums.py @@ -88,7 +88,7 @@ class Code(enum.IntEnum): """ Output only. Defines an error reason code. - Next id: 43 + Next id: 44 Attributes: CODE_UNSPECIFIED (int): There is no error. @@ -121,6 +121,7 @@ class Code(enum.IntEnum): FINDING_TYPE_UNSPECIFIED (int): Finding type value is not specified in the list findings request. FORBIDDEN_TO_SCAN_COMPUTE (int): Scan targets Compute Engine, yet current project was not whitelisted for Google Compute Engine Scanning Alpha access. + FORBIDDEN_UPDATE_TO_MANAGED_SCAN (int): User tries to update managed scan MALFORMED_FILTER (int): The supplied filter is malformed. For example, it can not be parsed, does not have a filter type in expression, or the same filter type appears more than once. 
@@ -171,6 +172,7 @@ class Code(enum.IntEnum): FAILED_TO_AUTHENTICATE_TO_TARGET = 19 FINDING_TYPE_UNSPECIFIED = 20 FORBIDDEN_TO_SCAN_COMPUTE = 21 + FORBIDDEN_UPDATE_TO_MANAGED_SCAN = 43 MALFORMED_FILTER = 22 MALFORMED_RESOURCE_NAME = 23 PROJECT_INACTIVE = 24 @@ -248,7 +250,7 @@ class Code(enum.IntEnum): authentication or some other page outside of the scan scope. TOO_MANY_HTTP_ERRORS (int): Indicates that a scan encountered numerous errors from the web site pages. When available, most\_common\_http\_error\_code field indicates - the the most common HTTP error code encountered during the scan. + the most common HTTP error code encountered during the scan. """ CODE_UNSPECIFIED = 0 @@ -265,7 +267,7 @@ class Code(enum.IntEnum): """ Output only. Defines a warning message code. - Next id: 5 + Next id: 6 Attributes: CODE_UNSPECIFIED (int): Default value is never used. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client.py index fc1dc384dd9b..8529b02bf489 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client.py @@ -260,11 +260,9 @@ def create_scan_config( >>> response = client.create_scan_config(parent, scan_config) Args: - parent (str): Required. - The parent resource name where the scan is created, which should be a + parent (str): Required. The parent resource name where the scan is created, which should be a project resource name in the format 'projects/{projectId}'. - scan_config (Union[dict, ~google.cloud.websecurityscanner_v1beta.types.ScanConfig]): Required. - The ScanConfig to be created. + scan_config (Union[dict, ~google.cloud.websecurityscanner_v1beta.types.ScanConfig]): Required. The ScanConfig to be created. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.websecurityscanner_v1beta.types.ScanConfig` @@ -338,8 +336,7 @@ def delete_scan_config( >>> client.delete_scan_config(name) Args: - name (str): Required. - The resource name of the ScanConfig to be deleted. The name follows the + name (str): Required. The resource name of the ScanConfig to be deleted. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -406,8 +403,7 @@ def get_scan_config( >>> response = client.get_scan_config(name) Args: - name (str): Required. - The resource name of the ScanConfig to be returned. The name follows the + name (str): Required. The resource name of the ScanConfig to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -490,8 +486,7 @@ def list_scan_configs( ... pass Args: - parent (str): Required. - The parent resource name, which should be a project resource name in the + parent (str): Required. The parent resource name, which should be a project resource name in the format 'projects/{projectId}'. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -587,8 +582,7 @@ def update_scan_config( >>> response = client.update_scan_config(scan_config, update_mask) Args: - scan_config (Union[dict, ~google.cloud.websecurityscanner_v1beta.types.ScanConfig]): Required. - The ScanConfig to be updated. The name field must be set to identify the + scan_config (Union[dict, ~google.cloud.websecurityscanner_v1beta.types.ScanConfig]): Required. The ScanConfig to be updated. The name field must be set to identify the resource to be updated. 
The values of fields not covered by the mask will be ignored. @@ -670,8 +664,7 @@ def start_scan_run( >>> response = client.start_scan_run(name) Args: - name (str): Required. - The resource name of the ScanConfig to be used. The name follows the + name (str): Required. The resource name of the ScanConfig to be used. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -741,8 +734,7 @@ def get_scan_run( >>> response = client.get_scan_run(name) Args: - name (str): Required. - The resource name of the ScanRun to be returned. The name follows the + name (str): Required. The resource name of the ScanRun to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -827,8 +819,7 @@ def list_scan_runs( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan resource name in the + parent (str): Required. The parent resource name, which should be a scan resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}'. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -919,8 +910,7 @@ def stop_scan_run( >>> response = client.stop_scan_run(name) Args: - name (str): Required. - The resource name of the ScanRun to be stopped. The name follows the + name (str): Required. The resource name of the ScanRun to be stopped. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1004,8 +994,7 @@ def list_crawled_urls( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. 
The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. page_size (int): The maximum number of resources contained in the @@ -1097,8 +1086,7 @@ def get_finding( >>> response = client.get_finding(name) Args: - name (str): Required. - The resource name of the Finding to be returned. The name follows the + name (str): Required. The resource name of the Finding to be returned. The name follows the format of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}/findings/{findingId}'. retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1186,12 +1174,11 @@ def list_findings( ... pass Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - filter_ (str): The filter expression. The expression must be in the format: . Supported - field: 'finding\_type'. Supported operator: '='. + filter_ (str): Required. The filter expression. The expression must be in the format: . + Supported field: 'finding\_type'. Supported operator: '='. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -1281,8 +1268,7 @@ def list_finding_type_stats( >>> response = client.list_finding_type_stats(parent) Args: - parent (str): Required. - The parent resource name, which should be a scan run resource name in the + parent (str): Required. The parent resource name, which should be a scan run resource name in the format 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. 
retry (Optional[google.api_core.retry.Retry]): A retry object used diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client_config.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client_config.py index d447761e5f4c..61dafbb68866 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client_config.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/gapic/web_security_scanner_client_config.py @@ -24,7 +24,7 @@ }, "DeleteScanConfig": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "GetScanConfig": { diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto index 92a0dc663856..9316f8b2a269 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto @@ -17,8 +17,6 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "CrawledUrlProto"; @@ -29,16 +27,13 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // Security Scanner Service crawls the web applications, following all links // within the scope of sites, to find the URLs to test against. message CrawledUrl { - // Output only. // The http method of the request that was used to visit the URL, in // uppercase. string http_method = 1; - // Output only. // The URL that was crawled. string url = 2; - // Output only. // The body of the request that was used to visit the URL. 
string body = 3; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url_pb2.py index 4fd988ae3ec1..2c38599e41c7 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/crawled_url_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto", package="google.cloud.websecurityscanner.v1beta", @@ -26,9 +23,8 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\017CrawledUrlProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\n>google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto"<\n\nCrawledUrl\x12\x13\n\x0bhttp_method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\tB\xc2\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0f\x43rawledUrlProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n>google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto\x12&google.cloud.websecurityscanner.v1beta"<\n\nCrawledUrl\x12\x13\n\x0bhttp_method\x18\x01 \x01(\t\x12\x0b\n\x03url\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\tB\xc2\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0f\x43rawledUrlProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), - 
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -102,8 +98,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=136, - serialized_end=196, + serialized_start=106, + serialized_end=166, ) DESCRIPTOR.message_types_by_name["CrawledUrl"] = _CRAWLEDURL @@ -123,13 +119,12 @@ Attributes: http_method: - Output only. The http method of the request that was used to - visit the URL, in uppercase. + The http method of the request that was used to visit the URL, + in uppercase. url: - Output only. The URL that was crawled. + The URL that was crawled. body: - Output only. The body of the request that was used to visit - the URL. + The body of the request that was used to visit the URL. """, # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.CrawledUrl) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding.proto index 5e25abb5d061..5f86c9c71d05 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding.proto @@ -17,7 +17,7 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; +import "google/api/resource.proto"; import "google/cloud/websecurityscanner/v1beta/finding_addon.proto"; option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner"; @@ -29,79 +29,68 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // A Finding resource represents a vulnerability instance identified during a // ScanRun. message Finding { - // Output only. + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/Finding" + pattern: "projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}" + }; + // The resource name of the Finding. 
The name follows the format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{scanRunId}/findings/{findingId}'. // The finding IDs are generated by the system. string name = 1; - // Output only. // The type of the Finding. // Detailed and up-to-date information on findings can be found here: // https://cloud.google.com/security-scanner/docs/scan-result-details string finding_type = 2; - // Output only. // The http method of the request that triggered the vulnerability, in // uppercase. string http_method = 3; - // Output only. // The URL produced by the server-side fuzzer and used in the request that // triggered the vulnerability. string fuzzed_url = 4; - // Output only. // The body of the request that triggered the vulnerability. string body = 5; - // Output only. // The description of the vulnerability. string description = 6; - // Output only. // The URL containing human-readable payload that user can leverage to // reproduce the vulnerability. string reproduction_url = 7; - // Output only. // If the vulnerability was originated from nested IFrame, the immediate // parent IFrame is reported. string frame_url = 8; - // Output only. // The URL where the browser lands when the vulnerability is detected. string final_url = 9; - // Output only. // The tracking ID uniquely identifies a vulnerability instance across // multiple ScanRuns. string tracking_id = 10; - // Output only. // An addon containing information reported for a vulnerability with an HTML // form, if any. Form form = 16; - // Output only. // An addon containing information about outdated libraries. OutdatedLibrary outdated_library = 11; - // Output only. // An addon containing detailed information regarding any resource causing the // vulnerability such as JavaScript sources, image, audio files, etc. ViolatingResource violating_resource = 12; - // Output only. // An addon containing information about vulnerable or missing HTTP headers. 
VulnerableHeaders vulnerable_headers = 15; - // Output only. // An addon containing information about request parameters which were found // to be vulnerable. VulnerableParameters vulnerable_parameters = 13; - // Output only. // An addon containing information reported for an XSS, if any. Xss xss = 14; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto index d2759b831909..ea5989a13dae 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto @@ -17,8 +17,6 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "FindingAddonProto"; diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon_pb2.py index c029fbb15ca3..ee10ce7f056a 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_addon_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto", package="google.cloud.websecurityscanner.v1beta", @@ -26,9 +23,8 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\021FindingAddonProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - 
'\n@google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto"*\n\x04\x46orm\x12\x12\n\naction_uri\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t"Q\n\x0fOutdatedLibrary\x12\x14\n\x0clibrary_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x17\n\x0flearn_more_urls\x18\x03 \x03(\t"?\n\x11ViolatingResource\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x14\n\x0cresource_url\x18\x02 \x01(\t"/\n\x14VulnerableParameters\x12\x17\n\x0fparameter_names\x18\x01 \x03(\t"\xe8\x01\n\x11VulnerableHeaders\x12Q\n\x07headers\x18\x01 \x03(\x0b\x32@.google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header\x12Y\n\x0fmissing_headers\x18\x02 \x03(\x0b\x32@.google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header\x1a%\n\x06Header\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"2\n\x03Xss\x12\x14\n\x0cstack_traces\x18\x01 \x03(\t\x12\x15\n\rerror_message\x18\x02 \x01(\tB\xc4\x01\n*com.google.cloud.websecurityscanner.v1betaB\x11\x46indingAddonProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n@google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto\x12&google.cloud.websecurityscanner.v1beta"*\n\x04\x46orm\x12\x12\n\naction_uri\x18\x01 \x01(\t\x12\x0e\n\x06\x66ields\x18\x02 \x03(\t"Q\n\x0fOutdatedLibrary\x12\x14\n\x0clibrary_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x17\n\x0flearn_more_urls\x18\x03 \x03(\t"?\n\x11ViolatingResource\x12\x14\n\x0c\x63ontent_type\x18\x01 \x01(\t\x12\x14\n\x0cresource_url\x18\x02 \x01(\t"/\n\x14VulnerableParameters\x12\x17\n\x0fparameter_names\x18\x01 \x03(\t"\xe8\x01\n\x11VulnerableHeaders\x12Q\n\x07headers\x18\x01 \x03(\x0b\x32@.google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header\x12Y\n\x0fmissing_headers\x18\x02 
\x03(\x0b\x32@.google.cloud.websecurityscanner.v1beta.VulnerableHeaders.Header\x1a%\n\x06Header\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t"2\n\x03Xss\x12\x14\n\x0cstack_traces\x18\x01 \x03(\t\x12\x15\n\rerror_message\x18\x02 \x01(\tB\xc4\x01\n*com.google.cloud.websecurityscanner.v1betaB\x11\x46indingAddonProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -84,8 +80,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=138, - serialized_end=180, + serialized_start=108, + serialized_end=150, ) @@ -159,8 +155,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=182, - serialized_end=263, + serialized_start=152, + serialized_end=233, ) @@ -216,8 +212,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=265, - serialized_end=328, + serialized_start=235, + serialized_end=298, ) @@ -255,8 +251,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=330, - serialized_end=377, + serialized_start=300, + serialized_end=347, ) @@ -312,8 +308,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=575, - serialized_end=612, + serialized_start=545, + serialized_end=582, ) _VULNERABLEHEADERS = _descriptor.Descriptor( @@ -368,8 +364,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=380, - serialized_end=612, + serialized_start=350, + serialized_end=582, ) @@ -425,8 +421,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=614, - serialized_end=664, + serialized_start=584, + serialized_end=634, ) _VULNERABLEHEADERS_HEADER.containing_type = _VULNERABLEHEADERS diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_pb2.py index 
385aac2919bb..1d27a8a23088 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1beta.proto import ( finding_addon_pb2 as google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__addon__pb2, ) @@ -29,10 +29,10 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\014FindingProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\n:google/cloud/websecurityscanner_v1beta/proto/finding.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1a@google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto"\xa2\x05\n\x07\x46inding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x66inding_type\x18\x02 \x01(\t\x12\x13\n\x0bhttp_method\x18\x03 \x01(\t\x12\x12\n\nfuzzed_url\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x18\n\x10reproduction_url\x18\x07 \x01(\t\x12\x11\n\tframe_url\x18\x08 \x01(\t\x12\x11\n\tfinal_url\x18\t \x01(\t\x12\x13\n\x0btracking_id\x18\n \x01(\t\x12:\n\x04\x66orm\x18\x10 \x01(\x0b\x32,.google.cloud.websecurityscanner.v1beta.Form\x12Q\n\x10outdated_library\x18\x0b \x01(\x0b\x32\x37.google.cloud.websecurityscanner.v1beta.OutdatedLibrary\x12U\n\x12violating_resource\x18\x0c \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.ViolatingResource\x12U\n\x12vulnerable_headers\x18\x0f \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.VulnerableHeaders\x12[\n\x15vulnerable_parameters\x18\r 
\x01(\x0b\x32<.google.cloud.websecurityscanner.v1beta.VulnerableParameters\x12\x38\n\x03xss\x18\x0e \x01(\x0b\x32+.google.cloud.websecurityscanner.v1beta.XssB\xbf\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0c\x46indingProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n:google/cloud/websecurityscanner_v1beta/proto/finding.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x19google/api/resource.proto\x1a@google/cloud/websecurityscanner_v1beta/proto/finding_addon.proto"\xa9\x06\n\x07\x46inding\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x66inding_type\x18\x02 \x01(\t\x12\x13\n\x0bhttp_method\x18\x03 \x01(\t\x12\x12\n\nfuzzed_url\x18\x04 \x01(\t\x12\x0c\n\x04\x62ody\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x18\n\x10reproduction_url\x18\x07 \x01(\t\x12\x11\n\tframe_url\x18\x08 \x01(\t\x12\x11\n\tfinal_url\x18\t \x01(\t\x12\x13\n\x0btracking_id\x18\n \x01(\t\x12:\n\x04\x66orm\x18\x10 \x01(\x0b\x32,.google.cloud.websecurityscanner.v1beta.Form\x12Q\n\x10outdated_library\x18\x0b \x01(\x0b\x32\x37.google.cloud.websecurityscanner.v1beta.OutdatedLibrary\x12U\n\x12violating_resource\x18\x0c \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.ViolatingResource\x12U\n\x12vulnerable_headers\x18\x0f \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.VulnerableHeaders\x12[\n\x15vulnerable_parameters\x18\r \x01(\x0b\x32<.google.cloud.websecurityscanner.v1beta.VulnerableParameters\x12\x38\n\x03xss\x18\x0e 
\x01(\x0b\x32+.google.cloud.websecurityscanner.v1beta.Xss:\x84\x01\xea\x41\x80\x01\n)websecurityscanner.googleapis.com/Finding\x12Sprojects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}B\xbf\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0c\x46indingProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__addon__pb2.DESCRIPTOR, ], ) @@ -337,13 +337,15 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352A\200\001\n)websecurityscanner.googleapis.com/Finding\022Sprojects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}/findings/{finding}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=199, - serialized_end=873, + serialized_start=196, + serialized_end=1005, ) _FINDING.fields_by_name[ @@ -391,57 +393,54 @@ Attributes: name: - Output only. The resource name of the Finding. The name - follows the format of 'projects/{projectId}/scanConfigs/{scanC - onfigId}/scanruns/{scanRunId}/findings/{findingId}'. The - finding IDs are generated by the system. + The resource name of the Finding. The name follows the format + of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanruns/{ + scanRunId}/findings/{findingId}'. The finding IDs are + generated by the system. finding_type: - Output only. The type of the Finding. Detailed and up-to-date - information on findings can be found here: + The type of the Finding. Detailed and up-to-date information + on findings can be found here: https://cloud.google.com/security-scanner/docs/scan-result- details http_method: - Output only. 
The http method of the request that triggered the + The http method of the request that triggered the vulnerability, in uppercase. fuzzed_url: - Output only. The URL produced by the server-side fuzzer and - used in the request that triggered the vulnerability. + The URL produced by the server-side fuzzer and used in the + request that triggered the vulnerability. body: - Output only. The body of the request that triggered the - vulnerability. + The body of the request that triggered the vulnerability. description: - Output only. The description of the vulnerability. + The description of the vulnerability. reproduction_url: - Output only. The URL containing human-readable payload that - user can leverage to reproduce the vulnerability. + The URL containing human-readable payload that user can + leverage to reproduce the vulnerability. frame_url: - Output only. If the vulnerability was originated from nested - IFrame, the immediate parent IFrame is reported. + If the vulnerability was originated from nested IFrame, the + immediate parent IFrame is reported. final_url: - Output only. The URL where the browser lands when the - vulnerability is detected. + The URL where the browser lands when the vulnerability is + detected. tracking_id: - Output only. The tracking ID uniquely identifies a - vulnerability instance across multiple ScanRuns. + The tracking ID uniquely identifies a vulnerability instance + across multiple ScanRuns. form: - Output only. An addon containing information reported for a - vulnerability with an HTML form, if any. + An addon containing information reported for a vulnerability + with an HTML form, if any. outdated_library: - Output only. An addon containing information about outdated - libraries. + An addon containing information about outdated libraries. violating_resource: - Output only. An addon containing detailed information - regarding any resource causing the vulnerability such as - JavaScript sources, image, audio files, etc. 
+ An addon containing detailed information regarding any + resource causing the vulnerability such as JavaScript sources, + image, audio files, etc. vulnerable_headers: - Output only. An addon containing information about vulnerable - or missing HTTP headers. + An addon containing information about vulnerable or missing + HTTP headers. vulnerable_parameters: - Output only. An addon containing information about request - parameters which were found to be vulnerable. + An addon containing information about request parameters which + were found to be vulnerable. xss: - Output only. An addon containing information reported for an - XSS, if any. + An addon containing information reported for an XSS, if any. """, # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.Finding) ), @@ -450,4 +449,5 @@ DESCRIPTOR._options = None +_FINDING._options = None # @@protoc_insertion_point(module_scope) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto index cbace23d3433..97f4882d1540 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto @@ -17,8 +17,6 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; - option go_package = "google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner"; option java_multiple_files = true; option java_outer_classname = "FindingTypeStatsProto"; @@ -28,11 +26,9 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // A FindingTypeStats resource represents stats regarding a specific FindingType // of Findings under a given ScanRun. message FindingTypeStats { - // Output only. // The finding type associated with the stats. 
string finding_type = 1; - // Output only. // The count of findings belonging to this finding type. int32 finding_count = 2; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats_pb2.py index 3fa52a7f3321..2c522f67dcf3 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/finding_type_stats_pb2.py @@ -15,9 +15,6 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 - - DESCRIPTOR = _descriptor.FileDescriptor( name="google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto", package="google.cloud.websecurityscanner.v1beta", @@ -26,9 +23,8 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\025FindingTypeStatsProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\nEgoogle/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto"?\n\x10\x46indingTypeStats\x12\x14\n\x0c\x66inding_type\x18\x01 \x01(\t\x12\x15\n\rfinding_count\x18\x02 \x01(\x05\x42\xc8\x01\n*com.google.cloud.websecurityscanner.v1betaB\x15\x46indingTypeStatsProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\nEgoogle/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto\x12&google.cloud.websecurityscanner.v1beta"?\n\x10\x46indingTypeStats\x12\x14\n\x0c\x66inding_type\x18\x01 \x01(\t\x12\x15\n\rfinding_count\x18\x02 
\x01(\x05\x42\xc8\x01\n*com.google.cloud.websecurityscanner.v1betaB\x15\x46indingTypeStatsProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR], ) @@ -84,8 +80,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=143, - serialized_end=206, + serialized_start=113, + serialized_end=176, ) DESCRIPTOR.message_types_by_name["FindingTypeStats"] = _FINDINGTYPESTATS @@ -103,10 +99,9 @@ Attributes: finding_type: - Output only. The finding type associated with the stats. + The finding type associated with the stats. finding_count: - Output only. The count of findings belonging to this finding - type. + The count of findings belonging to this finding type. """, # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.FindingTypeStats) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config.proto index d698bb4f886e..c2b7dcb31727 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config.proto @@ -17,7 +17,8 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/websecurityscanner/v1beta/scan_run.proto"; import "google/protobuf/timestamp.proto"; @@ -29,36 +30,40 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // A ScanConfig resource contains the configurations to launch a scan. 
message ScanConfig { + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + pattern: "projects/{project}/scanConfigs/{scan_config}" + }; + // Scan authentication configuration. message Authentication { // Describes authentication configuration that uses a Google account. message GoogleAccount { - // Required. - // The user name of the Google account. - string username = 1; + // Required. The user name of the Google account. + string username = 1 [(google.api.field_behavior) = REQUIRED]; - // Input only. - // Required. - // The password of the Google account. The credential is stored encrypted + // Required. Input only. The password of the Google account. The credential is stored encrypted // and not returned in any response nor included in audit logs. - string password = 2; + string password = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = INPUT_ONLY + ]; } // Describes authentication configuration that uses a custom account. message CustomAccount { - // Required. - // The user name of the custom account. - string username = 1; + // Required. The user name of the custom account. + string username = 1 [(google.api.field_behavior) = REQUIRED]; - // Input only. - // Required. - // The password of the custom account. The credential is stored encrypted + // Required. Input only. The password of the custom account. The credential is stored encrypted // and not returned in any response nor included in audit logs. - string password = 2; + string password = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.field_behavior) = INPUT_ONLY + ]; - // Required. - // The login form URL of the website. - string login_url = 3; + // Required. The login form URL of the website. + string login_url = 3 [(google.api.field_behavior) = REQUIRED]; } // Required. @@ -80,9 +85,8 @@ message ScanConfig { // immediately. google.protobuf.Timestamp schedule_time = 1; - // Required. 
- // The duration of time between executions in days. - int32 interval_duration_days = 2; + // Required. The duration of time between executions in days. + int32 interval_duration_days = 2 [(google.api.field_behavior) = REQUIRED]; } // Type of user agents used for scanning. @@ -145,9 +149,8 @@ message ScanConfig { // generated by the system. string name = 1; - // Required. - // The user provided display name of the ScanConfig. - string display_name = 2; + // Required. The user provided display name of the ScanConfig. + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; // The maximum QPS during scanning. A valid value ranges from 5 to 20 // inclusively. If the field is unspecified or its value is set 0, server will @@ -155,9 +158,8 @@ message ScanConfig { // INVALID_ARGUMENT error. int32 max_qps = 3; - // Required. - // The starting URLs from which the scanner finds site pages. - repeated string starting_urls = 4; + // Required. The starting URLs from which the scanner finds site pages. + repeated string starting_urls = 4 [(google.api.field_behavior) = REQUIRED]; // The authentication configuration. If specified, service will use the // authentication configuration during scanning. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto index c55c744c6272..a50bdcaf37f6 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto @@ -30,7 +30,7 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; message ScanConfigError { // Output only. // Defines an error reason code. - // Next id: 43 + // Next id: 44 enum Code { option allow_alias = true; @@ -106,6 +106,9 @@ message ScanConfigError { // Google Compute Engine Scanning Alpha access. 
FORBIDDEN_TO_SCAN_COMPUTE = 21; + // User tries to update managed scan + FORBIDDEN_UPDATE_TO_MANAGED_SCAN = 43; + // The supplied filter is malformed. For example, it can not be parsed, does // not have a filter type in expression, or the same filter type appears // more than once. @@ -174,11 +177,9 @@ message ScanConfigError { UNSUPPORTED_URL_SCHEME = 42; } - // Output only. // Indicates the reason code for a configuration failure. Code code = 1; - // Output only. // Indicates the full name of the ScanConfig field that triggers this error, // for example "scan_config.max_qps". This field is provided for // troubleshooting purposes only and its actual value can change in the diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error_pb2.py index 03dfa6cae009..4d907be487b3 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_error_pb2.py @@ -23,7 +23,7 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\024ScanConfigErrorProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\nDgoogle/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto\x12&google.cloud.websecurityscanner.v1beta"\xc7\x0b\n\x0fScanConfigError\x12J\n\x04\x63ode\x18\x01 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfigError.Code\x12\x12\n\nfield_name\x18\x02 \x01(\t"\xd3\n\n\x04\x43ode\x12\x14\n\x10\x43ODE_UNSPECIFIED\x10\x00\x12\x06\n\x02OK\x10\x00\x12\x12\n\x0eINTERNAL_ERROR\x10\x01\x12\x1f\n\x1b\x41PPENGINE_API_BACKEND_ERROR\x10\x02\x12 
\n\x1c\x41PPENGINE_API_NOT_ACCESSIBLE\x10\x03\x12"\n\x1e\x41PPENGINE_DEFAULT_HOST_MISSING\x10\x04\x12!\n\x1d\x43\x41NNOT_USE_GOOGLE_COM_ACCOUNT\x10\x06\x12\x1c\n\x18\x43\x41NNOT_USE_OWNER_ACCOUNT\x10\x07\x12\x1d\n\x19\x43OMPUTE_API_BACKEND_ERROR\x10\x08\x12\x1e\n\x1a\x43OMPUTE_API_NOT_ACCESSIBLE\x10\t\x12\x37\n3CUSTOM_LOGIN_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT\x10\n\x12\x1e\n\x1a\x43USTOM_LOGIN_URL_MALFORMED\x10\x0b\x12\x33\n/CUSTOM_LOGIN_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS\x10\x0c\x12\x31\n-CUSTOM_LOGIN_URL_MAPPED_TO_UNRESERVED_ADDRESS\x10\r\x12\x30\n,CUSTOM_LOGIN_URL_HAS_NON_ROUTABLE_IP_ADDRESS\x10\x0e\x12.\n*CUSTOM_LOGIN_URL_HAS_UNRESERVED_IP_ADDRESS\x10\x0f\x12\x17\n\x13\x44UPLICATE_SCAN_NAME\x10\x10\x12\x17\n\x13INVALID_FIELD_VALUE\x10\x12\x12$\n FAILED_TO_AUTHENTICATE_TO_TARGET\x10\x13\x12\x1c\n\x18\x46INDING_TYPE_UNSPECIFIED\x10\x14\x12\x1d\n\x19\x46ORBIDDEN_TO_SCAN_COMPUTE\x10\x15\x12\x14\n\x10MALFORMED_FILTER\x10\x16\x12\x1b\n\x17MALFORMED_RESOURCE_NAME\x10\x17\x12\x14\n\x10PROJECT_INACTIVE\x10\x18\x12\x12\n\x0eREQUIRED_FIELD\x10\x19\x12\x1e\n\x1aRESOURCE_NAME_INCONSISTENT\x10\x1a\x12\x18\n\x14SCAN_ALREADY_RUNNING\x10\x1b\x12\x14\n\x10SCAN_NOT_RUNNING\x10\x1c\x12/\n+SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT\x10\x1d\x12\x16\n\x12SEED_URL_MALFORMED\x10\x1e\x12+\n\'SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS\x10\x1f\x12)\n%SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS\x10 
\x12(\n$SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS\x10!\x12&\n"SEED_URL_HAS_UNRESERVED_IP_ADDRESS\x10#\x12"\n\x1eSERVICE_ACCOUNT_NOT_CONFIGURED\x10$\x12\x12\n\x0eTOO_MANY_SCANS\x10%\x12"\n\x1eUNABLE_TO_RESOLVE_PROJECT_INFO\x10&\x12(\n$UNSUPPORTED_BLACKLIST_PATTERN_FORMAT\x10\'\x12\x16\n\x12UNSUPPORTED_FILTER\x10(\x12\x1c\n\x18UNSUPPORTED_FINDING_TYPE\x10)\x12\x1a\n\x16UNSUPPORTED_URL_SCHEME\x10*\x1a\x02\x10\x01\x42\xc7\x01\n*com.google.cloud.websecurityscanner.v1betaB\x14ScanConfigErrorProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\nDgoogle/cloud/websecurityscanner_v1beta/proto/scan_config_error.proto\x12&google.cloud.websecurityscanner.v1beta"\xed\x0b\n\x0fScanConfigError\x12J\n\x04\x63ode\x18\x01 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfigError.Code\x12\x12\n\nfield_name\x18\x02 \x01(\t"\xf9\n\n\x04\x43ode\x12\x14\n\x10\x43ODE_UNSPECIFIED\x10\x00\x12\x06\n\x02OK\x10\x00\x12\x12\n\x0eINTERNAL_ERROR\x10\x01\x12\x1f\n\x1b\x41PPENGINE_API_BACKEND_ERROR\x10\x02\x12 \n\x1c\x41PPENGINE_API_NOT_ACCESSIBLE\x10\x03\x12"\n\x1e\x41PPENGINE_DEFAULT_HOST_MISSING\x10\x04\x12!\n\x1d\x43\x41NNOT_USE_GOOGLE_COM_ACCOUNT\x10\x06\x12\x1c\n\x18\x43\x41NNOT_USE_OWNER_ACCOUNT\x10\x07\x12\x1d\n\x19\x43OMPUTE_API_BACKEND_ERROR\x10\x08\x12\x1e\n\x1a\x43OMPUTE_API_NOT_ACCESSIBLE\x10\t\x12\x37\n3CUSTOM_LOGIN_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT\x10\n\x12\x1e\n\x1a\x43USTOM_LOGIN_URL_MALFORMED\x10\x0b\x12\x33\n/CUSTOM_LOGIN_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS\x10\x0c\x12\x31\n-CUSTOM_LOGIN_URL_MAPPED_TO_UNRESERVED_ADDRESS\x10\r\x12\x30\n,CUSTOM_LOGIN_URL_HAS_NON_ROUTABLE_IP_ADDRESS\x10\x0e\x12.\n*CUSTOM_LOGIN_URL_HAS_UNRESERVED_IP_ADDRESS\x10\x0f\x12\x17\n\x13\x44UPLICATE_SCAN_NAME\x10\x10\x12\x17\n\x13INVALID_FIELD_VALUE\x10\x12\x12$\n 
FAILED_TO_AUTHENTICATE_TO_TARGET\x10\x13\x12\x1c\n\x18\x46INDING_TYPE_UNSPECIFIED\x10\x14\x12\x1d\n\x19\x46ORBIDDEN_TO_SCAN_COMPUTE\x10\x15\x12$\n FORBIDDEN_UPDATE_TO_MANAGED_SCAN\x10+\x12\x14\n\x10MALFORMED_FILTER\x10\x16\x12\x1b\n\x17MALFORMED_RESOURCE_NAME\x10\x17\x12\x14\n\x10PROJECT_INACTIVE\x10\x18\x12\x12\n\x0eREQUIRED_FIELD\x10\x19\x12\x1e\n\x1aRESOURCE_NAME_INCONSISTENT\x10\x1a\x12\x18\n\x14SCAN_ALREADY_RUNNING\x10\x1b\x12\x14\n\x10SCAN_NOT_RUNNING\x10\x1c\x12/\n+SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT\x10\x1d\x12\x16\n\x12SEED_URL_MALFORMED\x10\x1e\x12+\n\'SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS\x10\x1f\x12)\n%SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS\x10 \x12(\n$SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS\x10!\x12&\n"SEED_URL_HAS_UNRESERVED_IP_ADDRESS\x10#\x12"\n\x1eSERVICE_ACCOUNT_NOT_CONFIGURED\x10$\x12\x12\n\x0eTOO_MANY_SCANS\x10%\x12"\n\x1eUNABLE_TO_RESOLVE_PROJECT_INFO\x10&\x12(\n$UNSUPPORTED_BLACKLIST_PATTERN_FORMAT\x10\'\x12\x16\n\x12UNSUPPORTED_FILTER\x10(\x12\x1c\n\x18UNSUPPORTED_FINDING_TYPE\x10)\x12\x1a\n\x16UNSUPPORTED_URL_SCHEME\x10*\x1a\x02\x10\x01\x42\xc7\x01\n*com.google.cloud.websecurityscanner.v1betaB\x14ScanConfigErrorProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), ) @@ -174,141 +174,148 @@ type=None, ), _descriptor.EnumValueDescriptor( - name="MALFORMED_FILTER", + name="FORBIDDEN_UPDATE_TO_MANAGED_SCAN", index=21, + number=43, + serialized_options=None, + type=None, + ), + _descriptor.EnumValueDescriptor( + name="MALFORMED_FILTER", + index=22, number=22, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="MALFORMED_RESOURCE_NAME", - index=22, + index=23, number=23, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="PROJECT_INACTIVE", - index=23, + index=24, number=24, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="REQUIRED_FIELD", - index=24, 
+ index=25, number=25, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="RESOURCE_NAME_INCONSISTENT", - index=25, + index=26, number=26, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SCAN_ALREADY_RUNNING", - index=26, + index=27, number=27, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SCAN_NOT_RUNNING", - index=27, + index=28, number=28, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_DOES_NOT_BELONG_TO_CURRENT_PROJECT", - index=28, + index=29, number=29, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_MALFORMED", - index=29, + index=30, number=30, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_MAPPED_TO_NON_ROUTABLE_ADDRESS", - index=30, + index=31, number=31, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_MAPPED_TO_UNRESERVED_ADDRESS", - index=31, + index=32, number=32, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_HAS_NON_ROUTABLE_IP_ADDRESS", - index=32, + index=33, number=33, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SEED_URL_HAS_UNRESERVED_IP_ADDRESS", - index=33, + index=34, number=35, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="SERVICE_ACCOUNT_NOT_CONFIGURED", - index=34, + index=35, number=36, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="TOO_MANY_SCANS", - index=35, + index=36, number=37, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="UNABLE_TO_RESOLVE_PROJECT_INFO", - index=36, + index=37, number=38, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="UNSUPPORTED_BLACKLIST_PATTERN_FORMAT", - index=37, + index=38, number=39, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( 
name="UNSUPPORTED_FILTER", - index=38, + index=39, number=40, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="UNSUPPORTED_FINDING_TYPE", - index=39, + index=40, number=41, serialized_options=None, type=None, ), _descriptor.EnumValueDescriptor( name="UNSUPPORTED_URL_SCHEME", - index=40, + index=41, number=42, serialized_options=None, type=None, @@ -317,7 +324,7 @@ containing_type=None, serialized_options=_b("\020\001"), serialized_start=229, - serialized_end=1592, + serialized_end=1630, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIGERROR_CODE) @@ -375,7 +382,7 @@ extension_ranges=[], oneofs=[], serialized_start=113, - serialized_end=1592, + serialized_end=1630, ) _SCANCONFIGERROR.fields_by_name["code"].enum_type = _SCANCONFIGERROR_CODE @@ -397,13 +404,12 @@ Attributes: code: - Output only. Indicates the reason code for a configuration - failure. + Indicates the reason code for a configuration failure. field_name: - Output only. Indicates the full name of the ScanConfig field - that triggers this error, for example "scan\_config.max\_qps". - This field is provided for troubleshooting purposes only and - its actual value can change in the future. + Indicates the full name of the ScanConfig field that triggers + this error, for example "scan\_config.max\_qps". This field is + provided for troubleshooting purposes only and its actual + value can change in the future. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ScanConfigError) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_pb2.py index 79752f0de426..c7290be00774 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_config_pb2.py @@ -15,7 +15,8 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1beta.proto import ( scan_run_pb2 as google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2, ) @@ -30,10 +31,11 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\017ScanConfigProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\n>google/cloud/websecurityscanner_v1beta/proto/scan_config.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1a;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x97\x0c\n\nScanConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x0f\n\x07max_qps\x18\x03 \x01(\x05\x12\x15\n\rstarting_urls\x18\x04 \x03(\t\x12Y\n\x0e\x61uthentication\x18\x05 \x01(\x0b\x32\x41.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication\x12P\n\nuser_agent\x18\x06 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfig.UserAgent\x12\x1a\n\x12\x62lacklist_patterns\x18\x07 \x03(\t\x12M\n\x08schedule\x18\x08 
\x01(\x0b\x32;.google.cloud.websecurityscanner.v1beta.ScanConfig.Schedule\x12[\n\x10target_platforms\x18\t \x03(\x0e\x32\x41.google.cloud.websecurityscanner.v1beta.ScanConfig.TargetPlatform\x12{\n!export_to_security_command_center\x18\n \x01(\x0e\x32P.google.cloud.websecurityscanner.v1beta.ScanConfig.ExportToSecurityCommandCenter\x12\x43\n\nlatest_run\x18\x0b \x01(\x0b\x32/.google.cloud.websecurityscanner.v1beta.ScanRun\x12P\n\nrisk_level\x18\x0c \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfig.RiskLevel\x1a\xf5\x02\n\x0e\x41uthentication\x12i\n\x0egoogle_account\x18\x01 \x01(\x0b\x32O.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.GoogleAccountH\x00\x12i\n\x0e\x63ustom_account\x18\x02 \x01(\x0b\x32O.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.CustomAccountH\x00\x1a\x33\n\rGoogleAccount\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x1a\x46\n\rCustomAccount\x12\x10\n\x08username\x18\x01 \x01(\t\x12\x10\n\x08password\x18\x02 \x01(\t\x12\x11\n\tlogin_url\x18\x03 \x01(\tB\x10\n\x0e\x61uthentication\x1a]\n\x08Schedule\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1e\n\x16interval_duration_days\x18\x02 
\x01(\x05"`\n\tUserAgent\x12\x1a\n\x16USER_AGENT_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x43HROME_LINUX\x10\x01\x12\x12\n\x0e\x43HROME_ANDROID\x10\x02\x12\x11\n\rSAFARI_IPHONE\x10\x03"N\n\x0eTargetPlatform\x12\x1f\n\x1bTARGET_PLATFORM_UNSPECIFIED\x10\x00\x12\x0e\n\nAPP_ENGINE\x10\x01\x12\x0b\n\x07\x43OMPUTE\x10\x02"<\n\tRiskLevel\x12\x1a\n\x16RISK_LEVEL_UNSPECIFIED\x10\x00\x12\n\n\x06NORMAL\x10\x01\x12\x07\n\x03LOW\x10\x02"m\n\x1d\x45xportToSecurityCommandCenter\x12\x31\n-EXPORT_TO_SECURITY_COMMAND_CENTER_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\x0c\n\x08\x44ISABLED\x10\x02\x42\xc2\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0fScanConfigProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n>google/cloud/websecurityscanner_v1beta/proto/scan_config.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xa6\r\n\nScanConfig\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x0c\x64isplay_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x0f\n\x07max_qps\x18\x03 \x01(\x05\x12\x1a\n\rstarting_urls\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12Y\n\x0e\x61uthentication\x18\x05 \x01(\x0b\x32\x41.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication\x12P\n\nuser_agent\x18\x06 \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfig.UserAgent\x12\x1a\n\x12\x62lacklist_patterns\x18\x07 \x03(\t\x12M\n\x08schedule\x18\x08 \x01(\x0b\x32;.google.cloud.websecurityscanner.v1beta.ScanConfig.Schedule\x12[\n\x10target_platforms\x18\t \x03(\x0e\x32\x41.google.cloud.websecurityscanner.v1beta.ScanConfig.TargetPlatform\x12{\n!export_to_security_command_center\x18\n \x01(\x0e\x32P.google.cloud.websecurityscanner.v1beta.ScanConfig.ExportToSecurityCommandCenter\x12\x43\n\nlatest_run\x18\x0b 
\x01(\x0b\x32/.google.cloud.websecurityscanner.v1beta.ScanRun\x12P\n\nrisk_level\x18\x0c \x01(\x0e\x32<.google.cloud.websecurityscanner.v1beta.ScanConfig.RiskLevel\x1a\x94\x03\n\x0e\x41uthentication\x12i\n\x0egoogle_account\x18\x01 \x01(\x0b\x32O.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.GoogleAccountH\x00\x12i\n\x0e\x63ustom_account\x18\x02 \x01(\x0b\x32O.google.cloud.websecurityscanner.v1beta.ScanConfig.Authentication.CustomAccountH\x00\x1a@\n\rGoogleAccount\x12\x15\n\x08username\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x08password\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x04\x1aX\n\rCustomAccount\x12\x15\n\x08username\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x18\n\x08password\x18\x02 \x01(\tB\x06\xe0\x41\x02\xe0\x41\x04\x12\x16\n\tlogin_url\x18\x03 \x01(\tB\x03\xe0\x41\x02\x42\x10\n\x0e\x61uthentication\x1a\x62\n\x08Schedule\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12#\n\x16interval_duration_days\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02"`\n\tUserAgent\x12\x1a\n\x16USER_AGENT_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x43HROME_LINUX\x10\x01\x12\x12\n\x0e\x43HROME_ANDROID\x10\x02\x12\x11\n\rSAFARI_IPHONE\x10\x03"N\n\x0eTargetPlatform\x12\x1f\n\x1bTARGET_PLATFORM_UNSPECIFIED\x10\x00\x12\x0e\n\nAPP_ENGINE\x10\x01\x12\x0b\n\x07\x43OMPUTE\x10\x02"<\n\tRiskLevel\x12\x1a\n\x16RISK_LEVEL_UNSPECIFIED\x10\x00\x12\n\n\x06NORMAL\x10\x01\x12\x07\n\x03LOW\x10\x02"m\n\x1d\x45xportToSecurityCommandCenter\x12\x31\n-EXPORT_TO_SECURITY_COMMAND_CENTER_UNSPECIFIED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x12\x0c\n\x08\x44ISABLED\x10\x02:_\xea\x41\\\n,websecurityscanner.googleapis.com/ScanConfig\x12,projects/{project}/scanConfigs/{scan_config}B\xc2\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0fScanConfigProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + 
google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, ], @@ -65,8 +67,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1441, - serialized_end=1537, + serialized_start=1517, + serialized_end=1613, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIG_USERAGENT) @@ -92,8 +94,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1539, - serialized_end=1617, + serialized_start=1615, + serialized_end=1693, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIG_TARGETPLATFORM) @@ -119,8 +121,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1619, - serialized_end=1679, + serialized_start=1695, + serialized_end=1755, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIG_RISKLEVEL) @@ -146,8 +148,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=1681, - serialized_end=1790, + serialized_start=1757, + serialized_end=1866, ) _sym_db.RegisterEnumDescriptor(_SCANCONFIG_EXPORTTOSECURITYCOMMANDCENTER) @@ -174,7 +176,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -192,7 +194,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002\340A\004"), file=DESCRIPTOR, ), ], @@ -204,8 +206,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1203, - serialized_end=1254, + serialized_start=1243, + serialized_end=1307, ) _SCANCONFIG_AUTHENTICATION_CUSTOMACCOUNT = _descriptor.Descriptor( @@ -230,7 +232,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -248,7 +250,7 @@ 
containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002\340A\004"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -266,7 +268,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -278,8 +280,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1256, - serialized_end=1326, + serialized_start=1309, + serialized_end=1397, ) _SCANCONFIG_AUTHENTICATION = _descriptor.Descriptor( @@ -345,8 +347,8 @@ fields=[], ) ], - serialized_start=971, - serialized_end=1344, + serialized_start=1011, + serialized_end=1415, ) _SCANCONFIG_SCHEDULE = _descriptor.Descriptor( @@ -389,7 +391,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -401,8 +403,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1346, - serialized_end=1439, + serialized_start=1417, + serialized_end=1515, ) _SCANCONFIG = _descriptor.Descriptor( @@ -445,7 +447,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -481,7 +483,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -637,13 +639,15 @@ _SCANCONFIG_RISKLEVEL, _SCANCONFIG_EXPORTTOSECURITYCOMMANDCENTER, ], - serialized_options=None, + serialized_options=_b( + "\352A\\\n,websecurityscanner.googleapis.com/ScanConfig\022,projects/{project}/scanConfigs/{scan_config}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=231, - serialized_end=1790, + serialized_start=261, + serialized_end=1963, ) 
_SCANCONFIG_AUTHENTICATION_GOOGLEACCOUNT.containing_type = _SCANCONFIG_AUTHENTICATION @@ -712,7 +716,7 @@ username: Required. The user name of the Google account. password: - Input only. Required. The password of the Google account. The + Required. Input only. The password of the Google account. The credential is stored encrypted and not returned in any response nor included in audit logs. """, @@ -732,7 +736,7 @@ username: Required. The user name of the custom account. password: - Input only. Required. The password of the custom account. The + Required. Input only. The password of the custom account. The credential is stored encrypted and not returned in any response nor included in audit logs. login_url: @@ -830,4 +834,13 @@ DESCRIPTOR._options = None +_SCANCONFIG_AUTHENTICATION_GOOGLEACCOUNT.fields_by_name["username"]._options = None +_SCANCONFIG_AUTHENTICATION_GOOGLEACCOUNT.fields_by_name["password"]._options = None +_SCANCONFIG_AUTHENTICATION_CUSTOMACCOUNT.fields_by_name["username"]._options = None +_SCANCONFIG_AUTHENTICATION_CUSTOMACCOUNT.fields_by_name["password"]._options = None +_SCANCONFIG_AUTHENTICATION_CUSTOMACCOUNT.fields_by_name["login_url"]._options = None +_SCANCONFIG_SCHEDULE.fields_by_name["interval_duration_days"]._options = None +_SCANCONFIG.fields_by_name["display_name"]._options = None +_SCANCONFIG.fields_by_name["starting_urls"]._options = None +_SCANCONFIG._options = None # @@protoc_insertion_point(module_scope) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run.proto index 497edc39bd1b..84564c6e2f50 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run.proto @@ -17,7 +17,7 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; -import "google/api/annotations.proto"; +import 
"google/api/resource.proto"; import "google/cloud/websecurityscanner/v1beta/scan_run_error_trace.proto"; import "google/cloud/websecurityscanner/v1beta/scan_run_warning_trace.proto"; import "google/protobuf/timestamp.proto"; @@ -31,6 +31,11 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // A ScanRun is a output-only resource representing an actual run of the scan. // Next id: 12 message ScanRun { + option (google.api.resource) = { + type: "websecurityscanner.googleapis.com/ScanRun" + pattern: "projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}" + }; + // Types of ScanRun execution state. enum ExecutionState { // Represents an invalid state caused by internal server error. This value @@ -63,59 +68,48 @@ message ScanRun { KILLED = 3; } - // Output only. // The resource name of the ScanRun. The name follows the format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. // The ScanRun IDs are generated by the system. string name = 1; - // Output only. // The execution state of the ScanRun. ExecutionState execution_state = 2; - // Output only. // The result state of the ScanRun. This field is only available after the // execution state reaches "FINISHED". ResultState result_state = 3; - // Output only. // The time at which the ScanRun started. google.protobuf.Timestamp start_time = 4; - // Output only. // The time at which the ScanRun reached termination state - that the ScanRun // is either finished or stopped by user. google.protobuf.Timestamp end_time = 5; - // Output only. // The number of URLs crawled during this ScanRun. If the scan is in progress, // the value represents the number of URLs crawled up to now. int64 urls_crawled_count = 6; - // Output only. // The number of URLs tested during this ScanRun. If the scan is in progress, // the value represents the number of URLs tested up to now. 
The number of // URLs tested is usually larger than the number URLS crawled because // typically a crawled URL is tested with multiple test payloads. int64 urls_tested_count = 7; - // Output only. // Whether the scan run has found any vulnerabilities. bool has_vulnerabilities = 8; - // Output only. // The percentage of total completion ranging from 0 to 100. // If the scan is in queue, the value is 0. // If the scan is running, the value ranges from 0 to 100. // If the scan is finished, the value is 100. int32 progress_percent = 9; - // Output only. // If result_state is an ERROR, this field provides the primary reason for // scan's termination and more details, if such are available. ScanRunErrorTrace error_trace = 10; - // Output only. // A list of warnings, if such are encountered during this scan run. repeated ScanRunWarningTrace warning_traces = 11; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto index 3d1a391dc6da..248967d245a4 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto @@ -55,21 +55,18 @@ message ScanRunErrorTrace { // Indicates that a scan encountered numerous errors from the web site // pages. When available, most_common_http_error_code field indicates the - // the most common HTTP error code encountered during the scan. + // most common HTTP error code encountered during the scan. TOO_MANY_HTTP_ERRORS = 6; } - // Output only. // Indicates the error reason code. Code code = 1; - // Output only. // If the scan encounters SCAN_CONFIG_ISSUE error, this field has the error // message encountered during scan configuration validation that is performed // before each scan run. ScanConfigError scan_config_error = 2; - // Output only. 
// If the scan encounters TOO_MANY_HTTP_ERRORS, this field indicates the most // common HTTP error code, if such is available. For example, if this code is // 404, the scan has encountered too many NOT_FOUND responses. diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace_pb2.py index 9d928fa70de8..b5975a795354 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace_pb2.py @@ -191,16 +191,15 @@ Attributes: code: - Output only. Indicates the error reason code. + Indicates the error reason code. scan_config_error: - Output only. If the scan encounters SCAN\_CONFIG\_ISSUE error, - this field has the error message encountered during scan - configuration validation that is performed before each scan - run. + If the scan encounters SCAN\_CONFIG\_ISSUE error, this field + has the error message encountered during scan configuration + validation that is performed before each scan run. most_common_http_error_code: - Output only. If the scan encounters TOO\_MANY\_HTTP\_ERRORS, - this field indicates the most common HTTP error code, if such - is available. For example, if this code is 404, the scan has + If the scan encounters TOO\_MANY\_HTTP\_ERRORS, this field + indicates the most common HTTP error code, if such is + available. For example, if this code is 404, the scan has encountered too many NOT\_FOUND responses. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_pb2.py index eb528ae4ff57..6354b6fd38e1 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1beta.proto import ( scan_run_error_trace_pb2 as google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__error__trace__pb2, ) @@ -33,10 +33,10 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\014ScanRunProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\n;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1aGgoogle/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto\x1aIgoogle/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xe0\x05\n\x07ScanRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12W\n\x0f\x65xecution_state\x18\x02 \x01(\x0e\x32>.google.cloud.websecurityscanner.v1beta.ScanRun.ExecutionState\x12Q\n\x0cresult_state\x18\x03 \x01(\x0e\x32;.google.cloud.websecurityscanner.v1beta.ScanRun.ResultState\x12.\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1a\n\x12urls_crawled_count\x18\x06 \x01(\x03\x12\x19\n\x11urls_tested_count\x18\x07 
\x01(\x03\x12\x1b\n\x13has_vulnerabilities\x18\x08 \x01(\x08\x12\x18\n\x10progress_percent\x18\t \x01(\x05\x12N\n\x0b\x65rror_trace\x18\n \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace\x12S\n\x0ewarning_traces\x18\x0b \x03(\x0b\x32;.google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace"Y\n\x0e\x45xecutionState\x12\x1f\n\x1b\x45XECUTION_STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x0c\n\x08SCANNING\x10\x02\x12\x0c\n\x08\x46INISHED\x10\x03"O\n\x0bResultState\x12\x1c\n\x18RESULT_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x12\t\n\x05\x45RROR\x10\x02\x12\n\n\x06KILLED\x10\x03\x42\xbf\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0cScanRunProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + '\n;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x19google/api/resource.proto\x1aGgoogle/cloud/websecurityscanner_v1beta/proto/scan_run_error_trace.proto\x1aIgoogle/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\xd2\x06\n\x07ScanRun\x12\x0c\n\x04name\x18\x01 \x01(\t\x12W\n\x0f\x65xecution_state\x18\x02 \x01(\x0e\x32>.google.cloud.websecurityscanner.v1beta.ScanRun.ExecutionState\x12Q\n\x0cresult_state\x18\x03 \x01(\x0e\x32;.google.cloud.websecurityscanner.v1beta.ScanRun.ResultState\x12.\n\nstart_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1a\n\x12urls_crawled_count\x18\x06 \x01(\x03\x12\x19\n\x11urls_tested_count\x18\x07 \x01(\x03\x12\x1b\n\x13has_vulnerabilities\x18\x08 \x01(\x08\x12\x18\n\x10progress_percent\x18\t \x01(\x05\x12N\n\x0b\x65rror_trace\x18\n \x01(\x0b\x32\x39.google.cloud.websecurityscanner.v1beta.ScanRunErrorTrace\x12S\n\x0ewarning_traces\x18\x0b 
\x03(\x0b\x32;.google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace"Y\n\x0e\x45xecutionState\x12\x1f\n\x1b\x45XECUTION_STATE_UNSPECIFIED\x10\x00\x12\n\n\x06QUEUED\x10\x01\x12\x0c\n\x08SCANNING\x10\x02\x12\x0c\n\x08\x46INISHED\x10\x03"O\n\x0bResultState\x12\x1c\n\x18RESULT_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x12\t\n\x05\x45RROR\x10\x02\x12\n\n\x06KILLED\x10\x03:p\xea\x41m\n)websecurityscanner.googleapis.com/ScanRun\x12@projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}B\xbf\x01\n*com.google.cloud.websecurityscanner.v1betaB\x0cScanRunProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__error__trace__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__warning__trace__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, @@ -69,8 +69,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=881, - serialized_end=970, + serialized_start=878, + serialized_end=967, ) _sym_db.RegisterEnumDescriptor(_SCANRUN_EXECUTIONSTATE) @@ -99,8 +99,8 @@ ], containing_type=None, serialized_options=None, - serialized_start=972, - serialized_end=1051, + serialized_start=969, + serialized_end=1048, ) _sym_db.RegisterEnumDescriptor(_SCANRUN_RESULTSTATE) @@ -314,13 +314,15 @@ extensions=[], nested_types=[], enum_types=[_SCANRUN_EXECUTIONSTATE, _SCANRUN_RESULTSTATE], - serialized_options=None, + serialized_options=_b( + "\352Am\n)websecurityscanner.googleapis.com/ScanRun\022@projects/{project}/scanConfigs/{scan_config}/scanRuns/{scan_run}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=315, - serialized_end=1051, + serialized_start=312, + 
serialized_end=1162, ) _SCANRUN.fields_by_name["execution_state"].enum_type = _SCANRUN_EXECUTIONSTATE @@ -358,46 +360,43 @@ Attributes: name: - Output only. The resource name of the ScanRun. The name - follows the format of 'projects/{projectId}/scanConfigs/{scanC - onfigId}/scanRuns/{scanRunId}'. The ScanRun IDs are generated - by the system. + The resource name of the ScanRun. The name follows the format + of 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{ + scanRunId}'. The ScanRun IDs are generated by the system. execution_state: - Output only. The execution state of the ScanRun. + The execution state of the ScanRun. result_state: - Output only. The result state of the ScanRun. This field is - only available after the execution state reaches "FINISHED". + The result state of the ScanRun. This field is only available + after the execution state reaches "FINISHED". start_time: - Output only. The time at which the ScanRun started. + The time at which the ScanRun started. end_time: - Output only. The time at which the ScanRun reached termination - state - that the ScanRun is either finished or stopped by - user. + The time at which the ScanRun reached termination state - that + the ScanRun is either finished or stopped by user. urls_crawled_count: - Output only. The number of URLs crawled during this ScanRun. - If the scan is in progress, the value represents the number of - URLs crawled up to now. + The number of URLs crawled during this ScanRun. If the scan is + in progress, the value represents the number of URLs crawled + up to now. urls_tested_count: - Output only. The number of URLs tested during this ScanRun. If - the scan is in progress, the value represents the number of - URLs tested up to now. The number of URLs tested is usually - larger than the number URLS crawled because typically a - crawled URL is tested with multiple test payloads. + The number of URLs tested during this ScanRun. 
If the scan is + in progress, the value represents the number of URLs tested up + to now. The number of URLs tested is usually larger than the + number URLS crawled because typically a crawled URL is tested + with multiple test payloads. has_vulnerabilities: - Output only. Whether the scan run has found any - vulnerabilities. + Whether the scan run has found any vulnerabilities. progress_percent: - Output only. The percentage of total completion ranging from 0 - to 100. If the scan is in queue, the value is 0. If the scan - is running, the value ranges from 0 to 100. If the scan is - finished, the value is 100. + The percentage of total completion ranging from 0 to 100. If + the scan is in queue, the value is 0. If the scan is running, + the value ranges from 0 to 100. If the scan is finished, the + value is 100. error_trace: - Output only. If result\_state is an ERROR, this field provides - the primary reason for scan's termination and more details, if - such are available. + If result\_state is an ERROR, this field provides the primary + reason for scan's termination and more details, if such are + available. warning_traces: - Output only. A list of warnings, if such are encountered - during this scan run. + A list of warnings, if such are encountered during this scan + run. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ScanRun) ), @@ -406,4 +405,5 @@ DESCRIPTOR._options = None +_SCANRUN._options = None # @@protoc_insertion_point(module_scope) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto index e461ad0b55b3..8207a02906a4 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace.proto @@ -29,7 +29,7 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; message ScanRunWarningTrace { // Output only. // Defines a warning message code. - // Next id: 5 + // Next id: 6 enum Code { // Default value is never used. CODE_UNSPECIFIED = 0; @@ -52,7 +52,6 @@ message ScanRunWarningTrace { BLOCKED_BY_IAP = 4; } - // Output only. // Indicates the warning code. Code code = 1; } diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace_pb2.py index f03a9418a590..cd94b1708679 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/scan_run_warning_trace_pb2.py @@ -130,7 +130,7 @@ Attributes: code: - Output only. Indicates the warning code. + Indicates the warning code. 
""", # @@protoc_insertion_point(class_scope:google.cloud.websecurityscanner.v1beta.ScanRunWarningTrace) ), diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto index d328a2860214..9ea6207c2420 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto @@ -18,6 +18,9 @@ syntax = "proto3"; package google.cloud.websecurityscanner.v1beta; import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/cloud/websecurityscanner/v1beta/crawled_url.proto"; import "google/cloud/websecurityscanner/v1beta/finding.proto"; import "google/cloud/websecurityscanner/v1beta/finding_type_stats.proto"; @@ -36,12 +39,16 @@ option php_namespace = "Google\\Cloud\\WebSecurityScanner\\V1beta"; // applications hosted on Google Cloud Platform. It crawls your application, and // attempts to exercise as many user inputs and event handlers as possible. service WebSecurityScanner { + option (google.api.default_host) = "websecurityscanner.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + // Creates a new ScanConfig. rpc CreateScanConfig(CreateScanConfigRequest) returns (ScanConfig) { option (google.api.http) = { post: "/v1beta/{parent=projects/*}/scanConfigs" body: "scan_config" }; + option (google.api.method_signature) = "parent,scan_config"; } // Deletes an existing ScanConfig and its child resources. @@ -49,6 +56,7 @@ service WebSecurityScanner { option (google.api.http) = { delete: "/v1beta/{name=projects/*/scanConfigs/*}" }; + option (google.api.method_signature) = "name"; } // Gets a ScanConfig. 
@@ -56,6 +64,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{name=projects/*/scanConfigs/*}" }; + option (google.api.method_signature) = "name"; } // Lists ScanConfigs under a given project. @@ -63,6 +72,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*}/scanConfigs" }; + option (google.api.method_signature) = "parent"; } // Updates a ScanConfig. This method support partial update of a ScanConfig. @@ -71,6 +81,7 @@ service WebSecurityScanner { patch: "/v1beta/{scan_config.name=projects/*/scanConfigs/*}" body: "scan_config" }; + option (google.api.method_signature) = "scan_config,update_mask"; } // Start a ScanRun according to the given ScanConfig. @@ -79,6 +90,7 @@ service WebSecurityScanner { post: "/v1beta/{name=projects/*/scanConfigs/*}:start" body: "*" }; + option (google.api.method_signature) = "name"; } // Gets a ScanRun. @@ -86,6 +98,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}" }; + option (google.api.method_signature) = "name"; } // Lists ScanRuns under a given ScanConfig, in descending order of ScanRun @@ -94,6 +107,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns" }; + option (google.api.method_signature) = "parent"; } // Stops a ScanRun. The stopped ScanRun is returned. @@ -102,6 +116,7 @@ service WebSecurityScanner { post: "/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop" body: "*" }; + option (google.api.method_signature) = "name"; } // List CrawledUrls under a given ScanRun. @@ -109,6 +124,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls" }; + option (google.api.method_signature) = "parent"; } // Gets a Finding. 
@@ -116,6 +132,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}" }; + option (google.api.method_signature) = "name"; } // List Findings under a given ScanRun. @@ -123,6 +140,7 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings" }; + option (google.api.method_signature) = "parent,filter"; } // List all FindingTypeStats under a given ScanRun. @@ -130,43 +148,59 @@ service WebSecurityScanner { option (google.api.http) = { get: "/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats" }; + option (google.api.method_signature) = "parent"; } } // Request for the `CreateScanConfig` method. message CreateScanConfigRequest { - // Required. - // The parent resource name where the scan is created, which should be a + // Required. The parent resource name where the scan is created, which should be a // project resource name in the format 'projects/{projectId}'. - string parent = 1; - - // Required. - // The ScanConfig to be created. - ScanConfig scan_config = 2; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // Required. The ScanConfig to be created. + ScanConfig scan_config = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for the `DeleteScanConfig` method. message DeleteScanConfigRequest { - // Required. - // The resource name of the ScanConfig to be deleted. The name follows the + // Required. The resource name of the ScanConfig to be deleted. The name follows the // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + } + ]; } // Request for the `GetScanConfig` method. 
message GetScanConfigRequest { - // Required. - // The resource name of the ScanConfig to be returned. The name follows the + // Required. The resource name of the ScanConfig to be returned. The name follows the // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + } + ]; } // Request for the `ListScanConfigs` method. message ListScanConfigsRequest { - // Required. - // The parent resource name, which should be a project resource name in the + // Required. The parent resource name, which should be a project resource name in the // format 'projects/{projectId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // A token identifying a page of results to be returned. This should be a // `next_page_token` value returned from a previous List request. @@ -181,17 +215,15 @@ message ListScanConfigsRequest { // Request for the `UpdateScanConfigRequest` method. message UpdateScanConfigRequest { - // Required. - // The ScanConfig to be updated. The name field must be set to identify the + // Required. The ScanConfig to be updated. The name field must be set to identify the // resource to be updated. The values of fields not covered by the mask // will be ignored. - ScanConfig scan_config = 2; + ScanConfig scan_config = 2 [(google.api.field_behavior) = REQUIRED]; - // Required. - // The update mask applies to the resource. For the `FieldMask` definition, + // Required. The update mask applies to the resource. 
For the `FieldMask` definition, // see // https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask - google.protobuf.FieldMask update_mask = 3; + google.protobuf.FieldMask update_mask = 3 [(google.api.field_behavior) = REQUIRED]; } // Response for the `ListScanConfigs` method. @@ -206,27 +238,39 @@ message ListScanConfigsResponse { // Request for the `StartScanRun` method. message StartScanRunRequest { - // Required. - // The resource name of the ScanConfig to be used. The name follows the + // Required. The resource name of the ScanConfig to be used. The name follows the // format of 'projects/{projectId}/scanConfigs/{scanConfigId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + } + ]; } // Request for the `GetScanRun` method. message GetScanRunRequest { - // Required. - // The resource name of the ScanRun to be returned. The name follows the + // Required. The resource name of the ScanRun to be returned. The name follows the // format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; } // Request for the `ListScanRuns` method. message ListScanRunsRequest { - // Required. - // The parent resource name, which should be a scan resource name in the + // Required. The parent resource name, which should be a scan resource name in the // format 'projects/{projectId}/scanConfigs/{scanConfigId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanConfig" + } + ]; // A token identifying a page of results to be returned. 
This should be a // `next_page_token` value returned from a previous List request. @@ -251,20 +295,28 @@ message ListScanRunsResponse { // Request for the `StopScanRun` method. message StopScanRunRequest { - // Required. - // The resource name of the ScanRun to be stopped. The name follows the + // Required. The resource name of the ScanRun to be stopped. The name follows the // format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; } // Request for the `ListCrawledUrls` method. message ListCrawledUrlsRequest { - // Required. - // The parent resource name, which should be a scan run resource name in the + // Required. The parent resource name, which should be a scan run resource name in the // format // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; // A token identifying a page of results to be returned. This should be a // `next_page_token` value returned from a previous List request. @@ -289,26 +341,34 @@ message ListCrawledUrlsResponse { // Request for the `GetFinding` method. message GetFindingRequest { - // Required. - // The resource name of the Finding to be returned. The name follows the + // Required. The resource name of the Finding to be returned. The name follows the // format of // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}/findings/{findingId}'. - string name = 1; + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/Finding" + } + ]; } // Request for the `ListFindings` method. message ListFindingsRequest { - // Required. 
- // The parent resource name, which should be a scan run resource name in the + // Required. The parent resource name, which should be a scan run resource name in the // format // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string parent = 1; - - // The filter expression. The expression must be in the format: + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; + + // Required. The filter expression. The expression must be in the format: // . // Supported field: 'finding_type'. // Supported operator: '='. - string filter = 2; + string filter = 2 [(google.api.field_behavior) = REQUIRED]; // A token identifying a page of results to be returned. This should be a // `next_page_token` value returned from a previous List request. @@ -333,11 +393,15 @@ message ListFindingsResponse { // Request for the `ListFindingTypeStats` method. message ListFindingTypeStatsRequest { - // Required. - // The parent resource name, which should be a scan run resource name in the + // Required. The parent resource name, which should be a scan run resource name in the // format // 'projects/{projectId}/scanConfigs/{scanConfigId}/scanRuns/{scanRunId}'. - string parent = 1; + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "websecurityscanner.googleapis.com/ScanRun" + } + ]; } // Response for the `ListFindingTypeStats` method. 
diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner_pb2.py b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner_pb2.py index d1a34a30f035..ad3f1d89f4b2 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner_pb2.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1beta/proto/web_security_scanner_pb2.py @@ -16,6 +16,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.websecurityscanner_v1beta.proto import ( crawled_url_pb2 as google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_crawled__url__pb2, ) @@ -43,10 +46,13 @@ "\n*com.google.cloud.websecurityscanner.v1betaB\027WebSecurityScannerProtoP\001ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\312\002&Google\\Cloud\\WebSecurityScanner\\V1beta" ), serialized_pb=_b( - '\nGgoogle/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1a>google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto\x1a:google/cloud/websecurityscanner_v1beta/proto/finding.proto\x1a\x45google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto\x1a>google/cloud/websecurityscanner_v1beta/proto/scan_config.proto\x1a;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"r\n\x17\x43reateScanConfigRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12G\n\x0bscan_config\x18\x02 \x01(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"\'\n\x17\x44\x65leteScanConfigRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t"$\n\x14GetScanConfigRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x16ListScanConfigsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\x93\x01\n\x17UpdateScanConfigRequest\x12G\n\x0bscan_config\x18\x02 \x01(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfig\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"|\n\x17ListScanConfigsResponse\x12H\n\x0cscan_configs\x18\x01 \x03(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfig\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x13StartScanRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"!\n\x11GetScanRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"L\n\x13ListScanRunsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"s\n\x14ListScanRunsResponse\x12\x42\n\tscan_runs\x18\x01 \x03(\x0b\x32/.google.cloud.websecurityscanner.v1beta.ScanRun\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t""\n\x12StopScanRunRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"O\n\x16ListCrawledUrlsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"|\n\x17ListCrawledUrlsResponse\x12H\n\x0c\x63rawled_urls\x18\x01 \x03(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.CrawledUrl\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"!\n\x11GetFindingRequest\x12\x0c\n\x04name\x18\x01 \x01(\t"\\\n\x13ListFindingsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"r\n\x14ListFindingsResponse\x12\x41\n\x08\x66indings\x18\x01 \x03(\x0b\x32/.google.cloud.websecurityscanner.v1beta.Finding\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"-\n\x1bListFindingTypeStatsRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t"t\n\x1cListFindingTypeStatsResponse\x12T\n\x12\x66inding_type_stats\x18\x01 
\x03(\x0b\x32\x38.google.cloud.websecurityscanner.v1beta.FindingTypeStats2\xaa\x14\n\x12WebSecurityScanner\x12\xc5\x01\n\x10\x43reateScanConfig\x12?.google.cloud.websecurityscanner.v1beta.CreateScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"<\x82\xd3\xe4\x93\x02\x36"\'/v1beta/{parent=projects/*}/scanConfigs:\x0bscan_config\x12\x9c\x01\n\x10\x44\x65leteScanConfig\x12?.google.cloud.websecurityscanner.v1beta.DeleteScanConfigRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02)*\'/v1beta/{name=projects/*/scanConfigs/*}\x12\xb2\x01\n\rGetScanConfig\x12<.google.cloud.websecurityscanner.v1beta.GetScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"/\x82\xd3\xe4\x93\x02)\x12\'/v1beta/{name=projects/*/scanConfigs/*}\x12\xc3\x01\n\x0fListScanConfigs\x12>.google.cloud.websecurityscanner.v1beta.ListScanConfigsRequest\x1a?.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse"/\x82\xd3\xe4\x93\x02)\x12\'/v1beta/{parent=projects/*}/scanConfigs\x12\xd1\x01\n\x10UpdateScanConfig\x12?.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"H\x82\xd3\xe4\x93\x02\x42\x32\x33/v1beta/{scan_config.name=projects/*/scanConfigs/*}:\x0bscan_config\x12\xb6\x01\n\x0cStartScanRun\x12;.google.cloud.websecurityscanner.v1beta.StartScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"8\x82\xd3\xe4\x93\x02\x32"-/v1beta/{name=projects/*/scanConfigs/*}:start:\x01*\x12\xb4\x01\n\nGetScanRun\x12\x39.google.cloud.websecurityscanner.v1beta.GetScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun":\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}\x12\xc5\x01\n\x0cListScanRuns\x12;.google.cloud.websecurityscanner.v1beta.ListScanRunsRequest\x1a<.google.cloud.websecurityscanner.v1beta.ListScanRunsResponse":\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns\x12\xbe\x01\n\x0bStopScanRun\x1
2:.google.cloud.websecurityscanner.v1beta.StopScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"B\x82\xd3\xe4\x93\x02<"7/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\x01*\x12\xdc\x01\n\x0fListCrawledUrls\x12>.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsRequest\x1a?.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse"H\x82\xd3\xe4\x93\x02\x42\x12@/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls\x12\xbf\x01\n\nGetFinding\x12\x39.google.cloud.websecurityscanner.v1beta.GetFindingRequest\x1a/.google.cloud.websecurityscanner.v1beta.Finding"E\x82\xd3\xe4\x93\x02?\x12=/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\x12\xd0\x01\n\x0cListFindings\x12;.google.cloud.websecurityscanner.v1beta.ListFindingsRequest\x1a<.google.cloud.websecurityscanner.v1beta.ListFindingsResponse"E\x82\xd3\xe4\x93\x02?\x12=/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\x12\xf0\x01\n\x14ListFindingTypeStats\x12\x43.google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsRequest\x1a\x44.google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsResponse"M\x82\xd3\xe4\x93\x02G\x12\x45/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStatsB\xca\x01\n*com.google.cloud.websecurityscanner.v1betaB\x17WebSecurityScannerProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' + 
'\nGgoogle/cloud/websecurityscanner_v1beta/proto/web_security_scanner.proto\x12&google.cloud.websecurityscanner.v1beta\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a>google/cloud/websecurityscanner_v1beta/proto/crawled_url.proto\x1a:google/cloud/websecurityscanner_v1beta/proto/finding.proto\x1a\x45google/cloud/websecurityscanner_v1beta/proto/finding_type_stats.proto\x1a>google/cloud/websecurityscanner_v1beta/proto/scan_config.proto\x1a;google/cloud/websecurityscanner_v1beta/proto/scan_run.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto"\xac\x01\n\x17\x43reateScanConfigRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12L\n\x0bscan_config\x18\x02 \x01(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfigB\x03\xe0\x41\x02"]\n\x17\x44\x65leteScanConfigRequest\x12\x42\n\x04name\x18\x01 \x01(\tB4\xe0\x41\x02\xfa\x41.\n,websecurityscanner.googleapis.com/ScanConfig"Z\n\x14GetScanConfigRequest\x12\x42\n\x04name\x18\x01 \x01(\tB4\xe0\x41\x02\xfa\x41.\n,websecurityscanner.googleapis.com/ScanConfig"\x84\x01\n\x16ListScanConfigsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"\x9d\x01\n\x17UpdateScanConfigRequest\x12L\n\x0bscan_config\x18\x02 \x01(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfigB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x17ListScanConfigsResponse\x12H\n\x0cscan_configs\x18\x01 \x03(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.ScanConfig\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Y\n\x13StartScanRunRequest\x12\x42\n\x04name\x18\x01 
\x01(\tB4\xe0\x41\x02\xfa\x41.\n,websecurityscanner.googleapis.com/ScanConfig"T\n\x11GetScanRunRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun"\x82\x01\n\x13ListScanRunsRequest\x12\x44\n\x06parent\x18\x01 \x01(\tB4\xe0\x41\x02\xfa\x41.\n,websecurityscanner.googleapis.com/ScanConfig\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"s\n\x14ListScanRunsResponse\x12\x42\n\tscan_runs\x18\x01 \x03(\x0b\x32/.google.cloud.websecurityscanner.v1beta.ScanRun\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x12StopScanRunRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun"\x82\x01\n\x16ListCrawledUrlsRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05"|\n\x17ListCrawledUrlsResponse\x12H\n\x0c\x63rawled_urls\x18\x01 \x03(\x0b\x32\x32.google.cloud.websecurityscanner.v1beta.CrawledUrl\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"T\n\x11GetFindingRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/Finding"\x94\x01\n\x13ListFindingsRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun\x12\x13\n\x06\x66ilter\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x12\n\npage_token\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05"r\n\x14ListFindingsResponse\x12\x41\n\x08\x66indings\x18\x01 \x03(\x0b\x32/.google.cloud.websecurityscanner.v1beta.Finding\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"`\n\x1bListFindingTypeStatsRequest\x12\x41\n\x06parent\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)websecurityscanner.googleapis.com/ScanRun"t\n\x1cListFindingTypeStatsResponse\x12T\n\x12\x66inding_type_stats\x18\x01 
\x03(\x0b\x32\x38.google.cloud.websecurityscanner.v1beta.FindingTypeStats2\x8e\x16\n\x12WebSecurityScanner\x12\xda\x01\n\x10\x43reateScanConfig\x12?.google.cloud.websecurityscanner.v1beta.CreateScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"Q\x82\xd3\xe4\x93\x02\x36"\'/v1beta/{parent=projects/*}/scanConfigs:\x0bscan_config\xda\x41\x12parent,scan_config\x12\xa3\x01\n\x10\x44\x65leteScanConfig\x12?.google.cloud.websecurityscanner.v1beta.DeleteScanConfigRequest\x1a\x16.google.protobuf.Empty"6\x82\xd3\xe4\x93\x02)*\'/v1beta/{name=projects/*/scanConfigs/*}\xda\x41\x04name\x12\xb9\x01\n\rGetScanConfig\x12<.google.cloud.websecurityscanner.v1beta.GetScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"6\x82\xd3\xe4\x93\x02)\x12\'/v1beta/{name=projects/*/scanConfigs/*}\xda\x41\x04name\x12\xcc\x01\n\x0fListScanConfigs\x12>.google.cloud.websecurityscanner.v1beta.ListScanConfigsRequest\x1a?.google.cloud.websecurityscanner.v1beta.ListScanConfigsResponse"8\x82\xd3\xe4\x93\x02)\x12\'/v1beta/{parent=projects/*}/scanConfigs\xda\x41\x06parent\x12\xeb\x01\n\x10UpdateScanConfig\x12?.google.cloud.websecurityscanner.v1beta.UpdateScanConfigRequest\x1a\x32.google.cloud.websecurityscanner.v1beta.ScanConfig"b\x82\xd3\xe4\x93\x02\x42\x32\x33/v1beta/{scan_config.name=projects/*/scanConfigs/*}:\x0bscan_config\xda\x41\x17scan_config,update_mask\x12\xbd\x01\n\x0cStartScanRun\x12;.google.cloud.websecurityscanner.v1beta.StartScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"?\x82\xd3\xe4\x93\x02\x32"-/v1beta/{name=projects/*/scanConfigs/*}:start:\x01*\xda\x41\x04name\x12\xbb\x01\n\nGetScanRun\x12\x39.google.cloud.websecurityscanner.v1beta.GetScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"A\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}\xda\x41\x04name\x12\xce\x01\n\x0cListScanRuns\x12;.google.cloud.websecurityscanner.v1beta.ListScanRunsRequest\x1a<.google.cloud.websecurityscan
ner.v1beta.ListScanRunsResponse"C\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns\xda\x41\x06parent\x12\xc5\x01\n\x0bStopScanRun\x12:.google.cloud.websecurityscanner.v1beta.StopScanRunRequest\x1a/.google.cloud.websecurityscanner.v1beta.ScanRun"I\x82\xd3\xe4\x93\x02<"7/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\x01*\xda\x41\x04name\x12\xe5\x01\n\x0fListCrawledUrls\x12>.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsRequest\x1a?.google.cloud.websecurityscanner.v1beta.ListCrawledUrlsResponse"Q\x82\xd3\xe4\x93\x02\x42\x12@/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls\xda\x41\x06parent\x12\xc6\x01\n\nGetFinding\x12\x39.google.cloud.websecurityscanner.v1beta.GetFindingRequest\x1a/.google.cloud.websecurityscanner.v1beta.Finding"L\x82\xd3\xe4\x93\x02?\x12=/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\xda\x41\x04name\x12\xe0\x01\n\x0cListFindings\x12;.google.cloud.websecurityscanner.v1beta.ListFindingsRequest\x1a<.google.cloud.websecurityscanner.v1beta.ListFindingsResponse"U\x82\xd3\xe4\x93\x02?\x12=/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\xda\x41\rparent,filter\x12\xf9\x01\n\x14ListFindingTypeStats\x12\x43.google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsRequest\x1a\x44.google.cloud.websecurityscanner.v1beta.ListFindingTypeStatsResponse"V\x82\xd3\xe4\x93\x02G\x12\x45/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats\xda\x41\x06parent\x1aU\xca\x41!websecurityscanner.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\xca\x01\n*com.google.cloud.websecurityscanner.v1betaB\x17WebSecurityScannerProtoP\x01ZXgoogle.golang.org/genproto/googleapis/cloud/websecurityscanner/v1beta;websecurityscanner\xca\x02&Google\\Cloud\\WebSecurityScanner\\V1betab\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + 
google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_crawled__url__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__pb2.DESCRIPTOR, google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__type__stats__pb2.DESCRIPTOR, @@ -80,7 +86,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -98,7 +106,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -110,8 +118,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=528, - serialized_end=642, + serialized_start=614, + serialized_end=786, ) @@ -137,7 +145,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -149,8 +159,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=644, - serialized_end=683, + serialized_start=788, + serialized_end=881, ) @@ -176,7 +186,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -188,8 +200,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=685, - serialized_end=721, + serialized_start=883, + serialized_end=973, ) @@ -215,7 +227,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -263,8 +277,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=723, - serialized_end=802, + serialized_start=976, + serialized_end=1108, ) @@ -290,7 +304,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -308,7 +322,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -320,8 +334,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=805, - serialized_end=952, + serialized_start=1111, + serialized_end=1268, ) @@ -377,8 +391,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=954, - serialized_end=1078, + serialized_start=1270, + serialized_end=1394, ) @@ -404,7 +418,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ) ], @@ -416,8 +432,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1080, - serialized_end=1115, + serialized_start=1396, + serialized_end=1485, ) @@ -443,7 +459,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -455,8 +473,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1117, - serialized_end=1150, + serialized_start=1487, + serialized_end=1571, ) @@ -482,7 +500,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A.\n,websecurityscanner.googleapis.com/ScanConfig" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -530,8 +550,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1152, - serialized_end=1228, + 
serialized_start=1574, + serialized_end=1704, ) @@ -587,8 +607,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1230, - serialized_end=1345, + serialized_start=1706, + serialized_end=1821, ) @@ -614,7 +634,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -626,8 +648,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1347, - serialized_end=1381, + serialized_start=1823, + serialized_end=1908, ) @@ -653,7 +675,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -701,8 +725,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1383, - serialized_end=1462, + serialized_start=1911, + serialized_end=2041, ) @@ -758,8 +782,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1464, - serialized_end=1588, + serialized_start=2043, + serialized_end=2167, ) @@ -785,7 +809,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/Finding" + ), file=DESCRIPTOR, ) ], @@ -797,8 +823,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1590, - serialized_end=1623, + serialized_start=2169, + serialized_end=2253, ) @@ -824,7 +850,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -842,7 +870,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -890,8 +918,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1625, - serialized_end=1717, + serialized_start=2256, + serialized_end=2404, ) @@ -947,8 +975,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1719, - serialized_end=1833, + serialized_start=2406, + serialized_end=2520, ) @@ -974,7 +1002,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A+\n)websecurityscanner.googleapis.com/ScanRun" + ), file=DESCRIPTOR, ) ], @@ -986,8 +1016,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1835, - serialized_end=1880, + serialized_start=2522, + serialized_end=2618, ) @@ -1025,8 +1055,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1882, - serialized_end=1998, + serialized_start=2620, + serialized_end=2736, ) _CREATESCANCONFIGREQUEST.fields_by_name[ @@ -1422,8 +1452,9 @@ resource name in the format 'projects/{projectId}/scanConfigs/ {scanConfigId}/scanRuns/{scanRunId}'. filter: - The filter expression. The expression must be in the format: . - Supported field: 'finding\_type'. Supported operator: '='. + Required. The filter expression. The expression must be in the + format: . Supported field: 'finding\_type'. Supported + operator: '='. page_token: A token identifying a page of results to be returned. 
This should be a ``next_page_token`` value returned from a previous @@ -1500,15 +1531,33 @@ DESCRIPTOR._options = None +_CREATESCANCONFIGREQUEST.fields_by_name["parent"]._options = None +_CREATESCANCONFIGREQUEST.fields_by_name["scan_config"]._options = None +_DELETESCANCONFIGREQUEST.fields_by_name["name"]._options = None +_GETSCANCONFIGREQUEST.fields_by_name["name"]._options = None +_LISTSCANCONFIGSREQUEST.fields_by_name["parent"]._options = None +_UPDATESCANCONFIGREQUEST.fields_by_name["scan_config"]._options = None +_UPDATESCANCONFIGREQUEST.fields_by_name["update_mask"]._options = None +_STARTSCANRUNREQUEST.fields_by_name["name"]._options = None +_GETSCANRUNREQUEST.fields_by_name["name"]._options = None +_LISTSCANRUNSREQUEST.fields_by_name["parent"]._options = None +_STOPSCANRUNREQUEST.fields_by_name["name"]._options = None +_LISTCRAWLEDURLSREQUEST.fields_by_name["parent"]._options = None +_GETFINDINGREQUEST.fields_by_name["name"]._options = None +_LISTFINDINGSREQUEST.fields_by_name["parent"]._options = None +_LISTFINDINGSREQUEST.fields_by_name["filter"]._options = None +_LISTFINDINGTYPESTATSREQUEST.fields_by_name["parent"]._options = None _WEBSECURITYSCANNER = _descriptor.ServiceDescriptor( name="WebSecurityScanner", full_name="google.cloud.websecurityscanner.v1beta.WebSecurityScanner", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=2001, - serialized_end=4603, + serialized_options=_b( + "\312A!websecurityscanner.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform" + ), + serialized_start=2739, + serialized_end=5569, methods=[ _descriptor.MethodDescriptor( name="CreateScanConfig", @@ -1518,7 +1567,7 @@ input_type=_CREATESCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\0026\"'/v1beta/{parent=projects/*}/scanConfigs:\013scan_config" + 
"\202\323\344\223\0026\"'/v1beta/{parent=projects/*}/scanConfigs:\013scan_config\332A\022parent,scan_config" ), ), _descriptor.MethodDescriptor( @@ -1529,7 +1578,7 @@ input_type=_DELETESCANCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002)*'/v1beta/{name=projects/*/scanConfigs/*}" + "\202\323\344\223\002)*'/v1beta/{name=projects/*/scanConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1540,7 +1589,7 @@ input_type=_GETSCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\002)\022'/v1beta/{name=projects/*/scanConfigs/*}" + "\202\323\344\223\002)\022'/v1beta/{name=projects/*/scanConfigs/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1551,7 +1600,7 @@ input_type=_LISTSCANCONFIGSREQUEST, output_type=_LISTSCANCONFIGSRESPONSE, serialized_options=_b( - "\202\323\344\223\002)\022'/v1beta/{parent=projects/*}/scanConfigs" + "\202\323\344\223\002)\022'/v1beta/{parent=projects/*}/scanConfigs\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1562,7 +1611,7 @@ input_type=_UPDATESCANCONFIGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__config__pb2._SCANCONFIG, serialized_options=_b( - "\202\323\344\223\002B23/v1beta/{scan_config.name=projects/*/scanConfigs/*}:\013scan_config" + "\202\323\344\223\002B23/v1beta/{scan_config.name=projects/*/scanConfigs/*}:\013scan_config\332A\027scan_config,update_mask" ), ), _descriptor.MethodDescriptor( @@ -1573,7 +1622,7 @@ input_type=_STARTSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - '\202\323\344\223\0022"-/v1beta/{name=projects/*/scanConfigs/*}:start:\001*' + '\202\323\344\223\0022"-/v1beta/{name=projects/*/scanConfigs/*}:start:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -1584,7 +1633,7 @@ 
input_type=_GETSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - "\202\323\344\223\0024\0222/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}" + "\202\323\344\223\0024\0222/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1595,7 +1644,7 @@ input_type=_LISTSCANRUNSREQUEST, output_type=_LISTSCANRUNSRESPONSE, serialized_options=_b( - "\202\323\344\223\0024\0222/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns" + "\202\323\344\223\0024\0222/v1beta/{parent=projects/*/scanConfigs/*}/scanRuns\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1606,7 +1655,7 @@ input_type=_STOPSCANRUNREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_scan__run__pb2._SCANRUN, serialized_options=_b( - '\202\323\344\223\002<"7/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\001*' + '\202\323\344\223\002<"7/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*}:stop:\001*\332A\004name' ), ), _descriptor.MethodDescriptor( @@ -1617,7 +1666,7 @@ input_type=_LISTCRAWLEDURLSREQUEST, output_type=_LISTCRAWLEDURLSRESPONSE, serialized_options=_b( - "\202\323\344\223\002B\022@/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls" + "\202\323\344\223\002B\022@/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/crawledUrls\332A\006parent" ), ), _descriptor.MethodDescriptor( @@ -1628,7 +1677,7 @@ input_type=_GETFINDINGREQUEST, output_type=google_dot_cloud_dot_websecurityscanner__v1beta_dot_proto_dot_finding__pb2._FINDING, serialized_options=_b( - "\202\323\344\223\002?\022=/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}" + "\202\323\344\223\002?\022=/v1beta/{name=projects/*/scanConfigs/*/scanRuns/*/findings/*}\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -1639,7 +1688,7 @@ input_type=_LISTFINDINGSREQUEST, output_type=_LISTFINDINGSRESPONSE, serialized_options=_b( - 
"\202\323\344\223\002?\022=/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings" + "\202\323\344\223\002?\022=/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findings\332A\rparent,filter" ), ), _descriptor.MethodDescriptor( @@ -1650,7 +1699,7 @@ input_type=_LISTFINDINGTYPESTATSREQUEST, output_type=_LISTFINDINGTYPESTATSRESPONSE, serialized_options=_b( - "\202\323\344\223\002G\022E/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats" + "\202\323\344\223\002G\022E/v1beta/{parent=projects/*/scanConfigs/*/scanRuns/*}/findingTypeStats\332A\006parent" ), ), ], diff --git a/websecurityscanner/synth.metadata b/websecurityscanner/synth.metadata index b39c7e874de8..6ce821633e5b 100644 --- a/websecurityscanner/synth.metadata +++ b/websecurityscanner/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:52:30.431818Z", + "updateTime": "2019-10-10T12:50:02.032093Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.38.0", + "dockerImage": "googleapis/artman@sha256:0d2f8d429110aeb8d82df6550ef4ede59d40df9062d260a1580fce688b0512bf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "10f91fa12f70e8e0209a45fc10807ed1f77c7e4e", + "internalRef": "273826591" } }, {