diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f0f3b24b..894fb6bc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt index 05dc4672..096e4800 100644 --- a/.kokoro/requirements.txt +++ b/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - 
--hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + 
--hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 24e095ae..6a8e33bb 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "3.12.0" + ".": "3.13.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 352007f2..de0dc8cf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-build/#history +## [3.13.0](https://github.com/googleapis/python-cloudbuild/compare/v3.12.0...v3.13.0) (2023-02-17) + + +### Features + +* Enable "rest" transport in Python for services supporting numeric enums ([0c4d84f](https://github.com/googleapis/python-cloudbuild/commit/0c4d84f6bec2e1097b140ad167785236ff52d11c)) + + +### Bug Fixes + +* Remove empty v2.CloudBuild definition ([0c4d84f](https://github.com/googleapis/python-cloudbuild/commit/0c4d84f6bec2e1097b140ad167785236ff52d11c)) + ## [3.12.0](https://github.com/googleapis/python-cloudbuild/compare/v3.11.1...v3.12.0) (2023-02-04) diff --git a/docs/cloudbuild_v2/cloud_build.rst 
b/docs/cloudbuild_v2/cloud_build.rst deleted file mode 100644 index a745d7b3..00000000 --- a/docs/cloudbuild_v2/cloud_build.rst +++ /dev/null @@ -1,6 +0,0 @@ -CloudBuild ----------------------------- - -.. automodule:: google.cloud.devtools.cloudbuild_v2.services.cloud_build - :members: - :inherited-members: diff --git a/docs/cloudbuild_v2/services.rst b/docs/cloudbuild_v2/services.rst index 27b527e0..c055be1a 100644 --- a/docs/cloudbuild_v2/services.rst +++ b/docs/cloudbuild_v2/services.rst @@ -3,5 +3,4 @@ Services for Google Cloud Devtools Cloudbuild v2 API .. toctree:: :maxdepth: 2 - cloud_build repository_manager diff --git a/google/cloud/devtools/cloudbuild/gapic_version.py b/google/cloud/devtools/cloudbuild/gapic_version.py index b5a6e376..77bce509 100644 --- a/google/cloud/devtools/cloudbuild/gapic_version.py +++ b/google/cloud/devtools/cloudbuild/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "3.12.0" # {x-release-please-version} +__version__ = "3.13.0" # {x-release-please-version} diff --git a/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json b/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json index 26aaf1e7..2648fd24 100644 --- a/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json +++ b/google/cloud/devtools/cloudbuild_v1/gapic_metadata.json @@ -196,6 +196,101 @@ ] } } + }, + "rest": { + "libraryClient": "CloudBuildClient", + "rpcs": { + "ApproveBuild": { + "methods": [ + "approve_build" + ] + }, + "CancelBuild": { + "methods": [ + "cancel_build" + ] + }, + "CreateBuild": { + "methods": [ + "create_build" + ] + }, + "CreateBuildTrigger": { + "methods": [ + "create_build_trigger" + ] + }, + "CreateWorkerPool": { + "methods": [ + "create_worker_pool" + ] + }, + "DeleteBuildTrigger": { + "methods": [ + "delete_build_trigger" + ] + }, + "DeleteWorkerPool": { + "methods": [ + "delete_worker_pool" + ] + }, + "GetBuild": { + "methods": [ + "get_build" + ] + }, + "GetBuildTrigger": { + "methods": [ + "get_build_trigger" + ] + }, + "GetWorkerPool": { + "methods": [ + "get_worker_pool" + ] + }, + "ListBuildTriggers": { + "methods": [ + "list_build_triggers" + ] + }, + "ListBuilds": { + "methods": [ + "list_builds" + ] + }, + "ListWorkerPools": { + "methods": [ + "list_worker_pools" + ] + }, + "ReceiveTriggerWebhook": { + "methods": [ + "receive_trigger_webhook" + ] + }, + "RetryBuild": { + "methods": [ + "retry_build" + ] + }, + "RunBuildTrigger": { + "methods": [ + "run_build_trigger" + ] + }, + "UpdateBuildTrigger": { + "methods": [ + "update_build_trigger" + ] + }, + "UpdateWorkerPool": { + "methods": [ + "update_worker_pool" + ] + } + } } } } diff --git a/google/cloud/devtools/cloudbuild_v1/gapic_version.py b/google/cloud/devtools/cloudbuild_v1/gapic_version.py index b5a6e376..77bce509 100644 --- a/google/cloud/devtools/cloudbuild_v1/gapic_version.py +++ b/google/cloud/devtools/cloudbuild_v1/gapic_version.py @@ -13,4 +13,4 
@@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.12.0" # {x-release-please-version} +__version__ = "3.13.0" # {x-release-please-version} diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index 64dfeedb..0a6c9a5f 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -57,6 +57,7 @@ from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudBuildGrpcTransport from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport +from .transports.rest import CloudBuildRestTransport class CloudBuildClientMeta(type): @@ -70,6 +71,7 @@ class CloudBuildClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] _transport_registry["grpc"] = CloudBuildGrpcTransport _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport + _transport_registry["rest"] = CloudBuildRestTransport def get_transport_class( cls, diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py index 9a42a282..0adcb604 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/__init__.py @@ -19,15 +19,20 @@ from .base import CloudBuildTransport from .grpc import CloudBuildGrpcTransport from .grpc_asyncio import CloudBuildGrpcAsyncIOTransport +from .rest import CloudBuildRestTransport +from .rest import CloudBuildRestInterceptor # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] _transport_registry["grpc"] = CloudBuildGrpcTransport _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport +_transport_registry["rest"] = CloudBuildRestTransport __all__ = ( "CloudBuildTransport", "CloudBuildGrpcTransport", "CloudBuildGrpcAsyncIOTransport", + "CloudBuildRestTransport", + "CloudBuildRestInterceptor", ) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py new file mode 100644 index 00000000..dd85ef42 --- /dev/null +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/rest.py @@ -0,0 +1,2708 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.api_core import operations_v1 +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.cloud.devtools.cloudbuild_v1.types import cloudbuild +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore + +from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class CloudBuildRestInterceptor: + """Interceptor for CloudBuild. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the CloudBuildRestTransport. + + .. 
code-block:: python + class MyCustomCloudBuildInterceptor(CloudBuildRestInterceptor): + def pre_approve_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_approve_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_cancel_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return 
request, metadata + + def post_get_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_builds(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_builds(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_build_triggers(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_build_triggers(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_worker_pools(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_worker_pools(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_receive_trigger_webhook(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_receive_trigger_webhook(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_retry_build(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_retry_build(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_run_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_run_build_trigger(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_build_trigger(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_build_trigger(self, 
response): + logging.log(f"Received response: {response}") + return response + + def pre_update_worker_pool(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_worker_pool(self, response): + logging.log(f"Received response: {response}") + return response + + transport = CloudBuildRestTransport(interceptor=MyCustomCloudBuildInterceptor()) + client = CloudBuildClient(transport=transport) + + + """ + + def pre_approve_build( + self, + request: cloudbuild.ApproveBuildRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.ApproveBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for approve_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_approve_build( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for approve_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_cancel_build( + self, + request: cloudbuild.CancelBuildRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.CancelBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for cancel_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_cancel_build(self, response: cloudbuild.Build) -> cloudbuild.Build: + """Post-rpc interceptor for cancel_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + + def pre_create_build( + self, + request: cloudbuild.CreateBuildRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.CreateBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_create_build( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_create_build_trigger( + self, + request: cloudbuild.CreateBuildTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.CreateBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_create_build_trigger( + self, response: cloudbuild.BuildTrigger + ) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for create_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_create_worker_pool( + self, + request: cloudbuild.CreateWorkerPoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.CreateWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_create_worker_pool( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_delete_build_trigger( + self, + request: cloudbuild.DeleteBuildTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.DeleteBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def pre_delete_worker_pool( + self, + request: cloudbuild.DeleteWorkerPoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.DeleteWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_delete_worker_pool( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_get_build( + self, request: cloudbuild.GetBuildRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudbuild.GetBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_get_build(self, response: cloudbuild.Build) -> cloudbuild.Build: + """Post-rpc interceptor for get_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_get_build_trigger( + self, + request: cloudbuild.GetBuildTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.GetBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_get_build_trigger( + self, response: cloudbuild.BuildTrigger + ) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for get_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_get_worker_pool( + self, + request: cloudbuild.GetWorkerPoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.GetWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_get_worker_pool( + self, response: cloudbuild.WorkerPool + ) -> cloudbuild.WorkerPool: + """Post-rpc interceptor for get_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + + def pre_list_builds( + self, request: cloudbuild.ListBuildsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudbuild.ListBuildsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_builds + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_list_builds( + self, response: cloudbuild.ListBuildsResponse + ) -> cloudbuild.ListBuildsResponse: + """Post-rpc interceptor for list_builds + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_list_build_triggers( + self, + request: cloudbuild.ListBuildTriggersRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.ListBuildTriggersRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_build_triggers + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_list_build_triggers( + self, response: cloudbuild.ListBuildTriggersResponse + ) -> cloudbuild.ListBuildTriggersResponse: + """Post-rpc interceptor for list_build_triggers + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_list_worker_pools( + self, + request: cloudbuild.ListWorkerPoolsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.ListWorkerPoolsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_worker_pools + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_list_worker_pools( + self, response: cloudbuild.ListWorkerPoolsResponse + ) -> cloudbuild.ListWorkerPoolsResponse: + """Post-rpc interceptor for list_worker_pools + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_receive_trigger_webhook( + self, + request: cloudbuild.ReceiveTriggerWebhookRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.ReceiveTriggerWebhookRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for receive_trigger_webhook + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_receive_trigger_webhook( + self, response: cloudbuild.ReceiveTriggerWebhookResponse + ) -> cloudbuild.ReceiveTriggerWebhookResponse: + """Post-rpc interceptor for receive_trigger_webhook + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_retry_build( + self, request: cloudbuild.RetryBuildRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[cloudbuild.RetryBuildRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for retry_build + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_retry_build( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for retry_build + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. 
+ """ + return response + + def pre_run_build_trigger( + self, + request: cloudbuild.RunBuildTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.RunBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for run_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_run_build_trigger( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for run_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_update_build_trigger( + self, + request: cloudbuild.UpdateBuildTriggerRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.UpdateBuildTriggerRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_build_trigger + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. + """ + return request, metadata + + def post_update_build_trigger( + self, response: cloudbuild.BuildTrigger + ) -> cloudbuild.BuildTrigger: + """Post-rpc interceptor for update_build_trigger + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + def pre_update_worker_pool( + self, + request: cloudbuild.UpdateWorkerPoolRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[cloudbuild.UpdateWorkerPoolRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_worker_pool + + Override in a subclass to manipulate the request or metadata + before they are sent to the CloudBuild server. 
+ """ + return request, metadata + + def post_update_worker_pool( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_worker_pool + + Override in a subclass to manipulate the response + after it is returned by the CloudBuild server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class CloudBuildRestStub: + _session: AuthorizedSession + _host: str + _interceptor: CloudBuildRestInterceptor + + +class CloudBuildRestTransport(CloudBuildTransport): + """REST backend transport for CloudBuild. + + Creates and manages builds on Google Cloud Platform. + + The main concept used by this API is a ``Build``, which describes + the location of the source to build, how to build the source, and + where to store the built artifacts, if any. + + A user can list previously-requested builds or get builds by their + ID to determine the status of the build. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "cloudbuild.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[CloudBuildRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or CloudBuildRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    @property
+    def operations_client(self) -> operations_v1.AbstractOperationsClient:
+        """Create the client designed to process long-running operations.
+
+        This property caches on the instance; repeated calls return the same
+        client.
+        """
+        # Only create a new client if we do not already have one.
+ if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.CancelOperation": [ + { + "method": "post", + "uri": "/v1/{name=operations/**}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, + ], + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v1/{name=operations/**}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v1", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. + return self._operations_client + + class _ApproveBuild(CloudBuildRestStub): + def __hash__(self): + return hash("ApproveBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.ApproveBuildRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the approve build method over HTTP. + + Args: + request (~.cloudbuild.ApproveBuildRequest): + The request object. Request to approve or reject a + pending build. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/builds/*}:approve", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/builds/*}:approve", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_approve_build(request, metadata) + pb_request = cloudbuild.ApproveBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_approve_build(resp) + return resp + + class _CancelBuild(CloudBuildRestStub): + def __hash__(self): + return hash("CancelBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.CancelBuildRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Call the cancel build method over HTTP. + + Args: + request (~.cloudbuild.CancelBuildRequest): + The request object. Request to cancel an ongoing build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.Build: + A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find + source code, how to build it (for example, the builder + image to run on the source), and where to store the + built artifacts. + + Fields can include the following variables, which will + be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified by + RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. 
+ - $REVISION_ID or $COMMIT_SHA: the commit SHA specified + by RepoSource or resolved from the specified branch + or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/builds/{id}:cancel", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/builds/*}:cancel", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_cancel_build(request, metadata) + pb_request = cloudbuild.CancelBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.Build() + pb_resp = cloudbuild.Build.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_cancel_build(resp) + return resp + + class _CreateBuild(CloudBuildRestStub): + def __hash__(self): + return hash("CreateBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.CreateBuildRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create build method over HTTP. + + Args: + request (~.cloudbuild.CreateBuildRequest): + The request object. Request to create a new build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/builds", + "body": "build", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/builds", + "body": "build", + }, + ] + request, metadata = self._interceptor.pre_create_build(request, metadata) + pb_request = cloudbuild.CreateBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_build(resp) + return resp + + class _CreateBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("CreateBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.CreateBuildTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Call the create build trigger method over HTTP. + + Args: + request (~.cloudbuild.CreateBuildTriggerRequest): + The request object. Request to create a new ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/triggers", + "body": "trigger", + }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/triggers", + "body": "trigger", + }, + ] + request, metadata = self._interceptor.pre_create_build_trigger( + request, metadata + ) + pb_request = cloudbuild.CreateBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.BuildTrigger() + pb_resp = cloudbuild.BuildTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_build_trigger(resp) + return resp + + class _CreateWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("CreateWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = { + "workerPoolId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.CreateWorkerPoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the create worker pool method over HTTP. + + Args: + request (~.cloudbuild.CreateWorkerPoolRequest): + The request object. Request to create a new ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/workerPools", + "body": "worker_pool", + }, + ] + request, metadata = self._interceptor.pre_create_worker_pool( + request, metadata + ) + pb_request = cloudbuild.CreateWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_worker_pool(resp) + return resp + + class _DeleteBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("DeleteBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.DeleteBuildTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete build trigger method over HTTP. + + Args: + request (~.cloudbuild.DeleteBuildTriggerRequest): + The request object. Request to delete a ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/projects/{project_id}/triggers/{trigger_id}", + }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/triggers/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_build_trigger( + request, metadata + ) + pb_request = cloudbuild.DeleteBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("DeleteWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.DeleteWorkerPoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete worker pool method over HTTP. 
+ + Args: + request (~.cloudbuild.DeleteWorkerPoolRequest): + The request object. Request to delete a ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/workerPools/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_worker_pool( + request, metadata + ) + pb_request = cloudbuild.DeleteWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_worker_pool(resp) + return resp + + class _GetBuild(CloudBuildRestStub): + def __hash__(self): + return hash("GetBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.GetBuildRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.Build: + r"""Call the get build method over HTTP. + + Args: + request (~.cloudbuild.GetBuildRequest): + The request object. Request to get a build. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.Build: + A build resource in the Cloud Build API. + + At a high level, a ``Build`` describes where to find + source code, how to build it (for example, the builder + image to run on the source), and where to store the + built artifacts. + + Fields can include the following variables, which will + be expanded when the build is created: + + - $PROJECT_ID: the project ID of the build. + - $PROJECT_NUMBER: the project number of the build. + - $BUILD_ID: the autogenerated ID of the build. + - $REPO_NAME: the source repository name specified by + RepoSource. + - $BRANCH_NAME: the branch name specified by + RepoSource. + - $TAG_NAME: the tag name specified by RepoSource. 
+ - $REVISION_ID or $COMMIT_SHA: the commit SHA specified + by RepoSource or resolved from the specified branch + or tag. + - $SHORT_SHA: first 7 characters of $REVISION_ID or + $COMMIT_SHA. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/builds/{id}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/builds/*}", + }, + ] + request, metadata = self._interceptor.pre_get_build(request, metadata) + pb_request = cloudbuild.GetBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.Build() + pb_resp = cloudbuild.Build.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_build(resp) + return resp + + class _GetBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("GetBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.GetBuildTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Call the get build trigger method over HTTP. + + Args: + request (~.cloudbuild.GetBuildTriggerRequest): + The request object. Returns the ``BuildTrigger`` with the specified ID. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/triggers/{trigger_id}", + }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/triggers/*}", + }, + ] + request, metadata = self._interceptor.pre_get_build_trigger( + request, metadata + ) + pb_request = cloudbuild.GetBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+            # Return the response
+            resp = cloudbuild.BuildTrigger()
+            pb_resp = cloudbuild.BuildTrigger.pb(resp)
+
+            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)
+            resp = self._interceptor.post_get_build_trigger(resp)
+            return resp
+
+    class _GetWorkerPool(CloudBuildRestStub):
+        def __hash__(self):
+            return hash("GetWorkerPool")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: cloudbuild.GetWorkerPoolRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> cloudbuild.WorkerPool:
+            r"""Call the get worker pool method over HTTP.
+
+            Args:
+                request (~.cloudbuild.GetWorkerPoolRequest):
+                    The request object. Request to get a ``WorkerPool`` with the specified name.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.cloudbuild.WorkerPool:
+                    Configuration for a ``WorkerPool``.
+
+                Cloud Build owns and maintains a pool of workers for
+                general use and have no access to a project's private
+                network. By default, builds submitted to Cloud Build
+                will use a worker from this pool.
+
+                If your build needs access to resources on a private
+                network, create and use a ``WorkerPool`` to run your
+                builds. Private ``WorkerPool``\ s give your builds
+                access to any single VPC network that you administer,
+                including any on-prem resources connected to that VPC
+                network. For an overview of private pools, see `Private
+                pools
+                overview <https://cloud.google.com/build/docs/private-pools/private-pools-overview>`__.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/workerPools/*}", + }, + ] + request, metadata = self._interceptor.pre_get_worker_pool(request, metadata) + pb_request = cloudbuild.GetWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.WorkerPool() + pb_resp = cloudbuild.WorkerPool.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_worker_pool(resp) + return resp + + class _ListBuilds(CloudBuildRestStub): + def __hash__(self): + return hash("ListBuilds") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.ListBuildsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ListBuildsResponse: + r"""Call the list builds method over HTTP. + + Args: + request (~.cloudbuild.ListBuildsRequest): + The request object. Request to list builds. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListBuildsResponse: + Response including listed builds. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/builds", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/builds", + }, + ] + request, metadata = self._interceptor.pre_list_builds(request, metadata) + pb_request = cloudbuild.ListBuildsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListBuildsResponse() + pb_resp = cloudbuild.ListBuildsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_builds(resp) + return resp + + class _ListBuildTriggers(CloudBuildRestStub): + def __hash__(self): + return hash("ListBuildTriggers") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.ListBuildTriggersRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ListBuildTriggersResponse: + r"""Call the list build triggers method over HTTP. + + Args: + request (~.cloudbuild.ListBuildTriggersRequest): + The request object. Request to list existing ``BuildTriggers``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListBuildTriggersResponse: + Response containing existing ``BuildTriggers``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/projects/{project_id}/triggers", + }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/triggers", + }, + ] + request, metadata = self._interceptor.pre_list_build_triggers( + request, metadata + ) + pb_request = cloudbuild.ListBuildTriggersRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListBuildTriggersResponse() + pb_resp = cloudbuild.ListBuildTriggersResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_build_triggers(resp) + return resp + + class _ListWorkerPools(CloudBuildRestStub): + def __hash__(self): + return hash("ListWorkerPools") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.ListWorkerPoolsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ListWorkerPoolsResponse: + r"""Call the list worker pools method over HTTP. + + Args: + request (~.cloudbuild.ListWorkerPoolsRequest): + The request object. Request to list ``WorkerPool``\ s. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ListWorkerPoolsResponse: + Response containing existing ``WorkerPools``. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/workerPools", + }, + ] + request, metadata = self._interceptor.pre_list_worker_pools( + request, metadata + ) + pb_request = cloudbuild.ListWorkerPoolsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ListWorkerPoolsResponse() + pb_resp = cloudbuild.ListWorkerPoolsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_worker_pools(resp) + return resp + + class _ReceiveTriggerWebhook(CloudBuildRestStub): + def __hash__(self): + return hash("ReceiveTriggerWebhook") + + def __call__( + self, + request: cloudbuild.ReceiveTriggerWebhookRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.ReceiveTriggerWebhookResponse: + r"""Call the receive trigger webhook method over HTTP. 
+ + Args: + request (~.cloudbuild.ReceiveTriggerWebhookRequest): + The request object. ReceiveTriggerWebhookRequest [Experimental] is the + request object accepted by the ReceiveTriggerWebhook + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.ReceiveTriggerWebhookResponse: + ReceiveTriggerWebhookResponse [Experimental] is the + response object for the ReceiveTriggerWebhook method. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/triggers/{trigger}:webhook", + "body": "body", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/triggers/*}:webhook", + "body": "body", + }, + ] + request, metadata = self._interceptor.pre_receive_trigger_webhook( + request, metadata + ) + pb_request = cloudbuild.ReceiveTriggerWebhookRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the 
appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.ReceiveTriggerWebhookResponse() + pb_resp = cloudbuild.ReceiveTriggerWebhookResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_receive_trigger_webhook(resp) + return resp + + class _RetryBuild(CloudBuildRestStub): + def __hash__(self): + return hash("RetryBuild") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.RetryBuildRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the retry build method over HTTP. + + Args: + request (~.cloudbuild.RetryBuildRequest): + The request object. Specifies a build to retry. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/builds/{id}:retry", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/builds/*}:retry", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_retry_build(request, metadata) + pb_request = cloudbuild.RetryBuildRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_retry_build(resp) + return resp + + class _RunBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("RunBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.RunBuildTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the run build trigger method over HTTP. + + Args: + request (~.cloudbuild.RunBuildTriggerRequest): + The request object. Specifies a build trigger to run and + the source to use. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/projects/{project_id}/triggers/{trigger_id}:run", + "body": "source", + }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/triggers/*}:run", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_run_build_trigger( + request, metadata + ) + pb_request = cloudbuild.RunBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_run_build_trigger(resp) + return resp + + class _UpdateBuildTrigger(CloudBuildRestStub): + def __hash__(self): + return hash("UpdateBuildTrigger") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.UpdateBuildTriggerRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> cloudbuild.BuildTrigger: + r"""Call the update build trigger method over HTTP. + + Args: + request (~.cloudbuild.UpdateBuildTriggerRequest): + The request object. Request to update an existing ``BuildTrigger``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.cloudbuild.BuildTrigger: + Configuration for an automated build + in response to source repository + changes. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/projects/{project_id}/triggers/{trigger_id}", + "body": "trigger", + }, + { + "method": "patch", + "uri": "/v1/{trigger.resource_name=projects/*/locations/*/triggers/*}", + "body": "trigger", + }, + ] + request, metadata = self._interceptor.pre_update_build_trigger( + request, metadata + ) + pb_request = cloudbuild.UpdateBuildTriggerRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = cloudbuild.BuildTrigger() + pb_resp = cloudbuild.BuildTrigger.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_build_trigger(resp) + return resp + + class _UpdateWorkerPool(CloudBuildRestStub): + def __hash__(self): + return hash("UpdateWorkerPool") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, str] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: cloudbuild.UpdateWorkerPoolRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Call the update worker pool method over HTTP. + + Args: + request (~.cloudbuild.UpdateWorkerPoolRequest): + The request object. Request to update a ``WorkerPool``. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}", + "body": "worker_pool", + }, + ] + request, metadata = self._interceptor.pre_update_worker_pool( + request, metadata + ) + pb_request = cloudbuild.UpdateWorkerPoolRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_worker_pool(resp) + return resp + + @property + def approve_build( + self, + ) -> Callable[[cloudbuild.ApproveBuildRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ApproveBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_build( + self, + ) -> Callable[[cloudbuild.CancelBuildRequest], cloudbuild.Build]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CancelBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_build( + self, + ) -> Callable[[cloudbuild.CreateBuildRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_build_trigger( + self, + ) -> Callable[[cloudbuild.CreateBuildTriggerRequest], cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_worker_pool( + self, + ) -> Callable[[cloudbuild.CreateWorkerPoolRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_build_trigger( + self, + ) -> Callable[[cloudbuild.DeleteBuildTriggerRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_worker_pool( + self, + ) -> Callable[[cloudbuild.DeleteWorkerPoolRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_build(self) -> Callable[[cloudbuild.GetBuildRequest], cloudbuild.Build]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_build_trigger( + self, + ) -> Callable[[cloudbuild.GetBuildTriggerRequest], cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_worker_pool( + self, + ) -> Callable[[cloudbuild.GetWorkerPoolRequest], cloudbuild.WorkerPool]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_builds( + self, + ) -> Callable[[cloudbuild.ListBuildsRequest], cloudbuild.ListBuildsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListBuilds(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_build_triggers( + self, + ) -> Callable[ + [cloudbuild.ListBuildTriggersRequest], cloudbuild.ListBuildTriggersResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListBuildTriggers(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_worker_pools( + self, + ) -> Callable[ + [cloudbuild.ListWorkerPoolsRequest], cloudbuild.ListWorkerPoolsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListWorkerPools(self._session, self._host, self._interceptor) # type: ignore + + @property + def receive_trigger_webhook( + self, + ) -> Callable[ + [cloudbuild.ReceiveTriggerWebhookRequest], + cloudbuild.ReceiveTriggerWebhookResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ReceiveTriggerWebhook(self._session, self._host, self._interceptor) # type: ignore + + @property + def retry_build( + self, + ) -> Callable[[cloudbuild.RetryBuildRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RetryBuild(self._session, self._host, self._interceptor) # type: ignore + + @property + def run_build_trigger( + self, + ) -> Callable[[cloudbuild.RunBuildTriggerRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._RunBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_build_trigger( + self, + ) -> Callable[[cloudbuild.UpdateBuildTriggerRequest], cloudbuild.BuildTrigger]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateBuildTrigger(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_worker_pool( + self, + ) -> Callable[[cloudbuild.UpdateWorkerPoolRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateWorkerPool(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("CloudBuildRestTransport",) diff --git a/google/cloud/devtools/cloudbuild_v2/__init__.py b/google/cloud/devtools/cloudbuild_v2/__init__.py index 071eff57..d8f32fb0 100644 --- a/google/cloud/devtools/cloudbuild_v2/__init__.py +++ b/google/cloud/devtools/cloudbuild_v2/__init__.py @@ -18,8 +18,6 @@ __version__ = package_version.__version__ -from .services.cloud_build import CloudBuildClient -from .services.cloud_build import CloudBuildAsyncClient from .services.repository_manager import RepositoryManagerClient from .services.repository_manager import RepositoryManagerAsyncClient @@ -53,11 +51,9 @@ from .types.repositories import UpdateConnectionRequest __all__ = ( - "CloudBuildAsyncClient", "RepositoryManagerAsyncClient", "BatchCreateRepositoriesRequest", "BatchCreateRepositoriesResponse", - "CloudBuildClient", "Connection", "CreateConnectionRequest", "CreateRepositoryRequest", diff --git a/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json 
b/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json index ac24f643..e4616bdb 100644 --- a/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json +++ b/google/cloud/devtools/cloudbuild_v2/gapic_metadata.json @@ -5,19 +5,6 @@ "protoPackage": "google.devtools.cloudbuild.v2", "schema": "1.0", "services": { - "CloudBuild": { - "clients": { - "grpc": { - "libraryClient": "CloudBuildClient" - }, - "grpc-async": { - "libraryClient": "CloudBuildAsyncClient" - }, - "rest": { - "libraryClient": "CloudBuildClient" - } - } - }, "RepositoryManager": { "clients": { "grpc": { diff --git a/google/cloud/devtools/cloudbuild_v2/gapic_version.py b/google/cloud/devtools/cloudbuild_v2/gapic_version.py index b5a6e376..77bce509 100644 --- a/google/cloud/devtools/cloudbuild_v2/gapic_version.py +++ b/google/cloud/devtools/cloudbuild_v2/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "3.12.0" # {x-release-please-version} +__version__ = "3.13.0" # {x-release-please-version} diff --git a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/__init__.py b/google/cloud/devtools/cloudbuild_v2/services/cloud_build/__init__.py deleted file mode 100644 index b740d08b..00000000 --- a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import CloudBuildClient -from .async_client import CloudBuildAsyncClient - -__all__ = ( - "CloudBuildClient", - "CloudBuildAsyncClient", -) diff --git a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/async_client.py b/google/cloud/devtools/cloudbuild_v2/services/cloud_build/async_client.py deleted file mode 100644 index 61250110..00000000 --- a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/async_client.py +++ /dev/null @@ -1,630 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import ( - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, -) - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .client import CloudBuildClient - - -class CloudBuildAsyncClient: - """Creates and manages builds on Google Cloud Platform.""" - - _client: CloudBuildClient - - DEFAULT_ENDPOINT = CloudBuildClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = CloudBuildClient.DEFAULT_MTLS_ENDPOINT - - common_billing_account_path = staticmethod( - CloudBuildClient.common_billing_account_path - ) - parse_common_billing_account_path = staticmethod( - CloudBuildClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod(CloudBuildClient.common_folder_path) - parse_common_folder_path = staticmethod(CloudBuildClient.parse_common_folder_path) - common_organization_path = staticmethod(CloudBuildClient.common_organization_path) - parse_common_organization_path = staticmethod( - CloudBuildClient.parse_common_organization_path - 
) - common_project_path = staticmethod(CloudBuildClient.common_project_path) - parse_common_project_path = staticmethod(CloudBuildClient.parse_common_project_path) - common_location_path = staticmethod(CloudBuildClient.common_location_path) - parse_common_location_path = staticmethod( - CloudBuildClient.parse_common_location_path - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildAsyncClient: The constructed client. - """ - return CloudBuildClient.from_service_account_info.__func__(CloudBuildAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildAsyncClient: The constructed client. - """ - return CloudBuildClient.from_service_account_file.__func__(CloudBuildAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return CloudBuildClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> CloudBuildTransport: - """Returns the transport used by the client instance. - - Returns: - CloudBuildTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial( - type(CloudBuildClient).get_transport_class, type(CloudBuildClient) - ) - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, CloudBuildTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud build client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.CloudBuildTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = CloudBuildClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - ) - - async def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. 
Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. 
A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("CloudBuildAsyncClient",) diff --git a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v2/services/cloud_build/client.py deleted file mode 100644 index e2312148..00000000 --- a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/client.py +++ /dev/null @@ -1,850 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import ( - Dict, - Mapping, - MutableMapping, - MutableSequence, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import CloudBuildGrpcTransport -from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .transports.rest import CloudBuildRestTransport - - -class CloudBuildClientMeta(type): - """Metaclass for the CloudBuild client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. 
- """ - - _transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] - _transport_registry["grpc"] = CloudBuildGrpcTransport - _transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport - _transport_registry["rest"] = CloudBuildRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[CloudBuildTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class CloudBuildClient(metaclass=CloudBuildClientMeta): - """Creates and manages builds on Google Cloud Platform.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "cloudbuild.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - CloudBuildClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> CloudBuildTransport: - """Returns the transport used by the client instance. - - Returns: - CloudBuildTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path( - folder: str, - ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path( - organization: str, - ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path( - project: str, - ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - 
) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[client_options_lib.ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
- """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, CloudBuildTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the cloud build client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, CloudBuildTransport]): The - transport to use. 
If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options - ) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, CloudBuildTransport): - # transport is a CloudBuildTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def __enter__(self) -> "CloudBuildClient": - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. 
warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def get_operation( - self, - request: Optional[operations_pb2.GetOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - r"""Gets the latest state of a long-running operation. - - Args: - request (:class:`~.operations_pb2.GetOperationRequest`): - The request object. Request message for - `GetOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.operations_pb2.Operation: - An ``Operation`` object. - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.GetOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def cancel_operation( - self, - request: Optional[operations_pb2.CancelOperationRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running operation. - - The server makes a best effort to cancel the operation, but success - is not guaranteed. If the server doesn't support this method, it returns - `google.rpc.Code.UNIMPLEMENTED`. - - Args: - request (:class:`~.operations_pb2.CancelOperationRequest`): - The request object. Request message for - `CancelOperation` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - None - """ - # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = operations_pb2.CancelOperationRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_operation, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def set_iam_policy( - self, - request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_iam_policy( - self, - request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def test_iam_permissions( - self, - request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -__all__ = ("CloudBuildClient",) diff --git a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/__init__.py b/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/__init__.py deleted file mode 100644 index 0adcb604..00000000 --- a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import CloudBuildTransport -from .grpc import CloudBuildGrpcTransport -from .grpc_asyncio import CloudBuildGrpcAsyncIOTransport -from .rest import CloudBuildRestTransport -from .rest import CloudBuildRestInterceptor - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[CloudBuildTransport]] -_transport_registry["grpc"] = CloudBuildGrpcTransport -_transport_registry["grpc_asyncio"] = CloudBuildGrpcAsyncIOTransport -_transport_registry["rest"] = CloudBuildRestTransport - -__all__ = ( - "CloudBuildTransport", - "CloudBuildGrpcTransport", - "CloudBuildGrpcAsyncIOTransport", - "CloudBuildRestTransport", - "CloudBuildRestInterceptor", -) diff --git a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/base.py b/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/base.py deleted file mode 100644 index ae7c8272..00000000 --- a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/base.py +++ /dev/null @@ -1,189 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.devtools.cloudbuild_v2 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=package_version.__version__ -) - - -class CloudBuildTransport(abc.ABC): - """Abstract transport class for CloudBuild.""" - - AUTH_SCOPES = () - - DEFAULT_HOST: str = "cloudbuild.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. 
- scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience( - api_audience if api_audience else host - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
- if ":" not in host: - host += ":443" - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = {} - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def get_operation( - self, - ) -> Callable[ - [operations_pb2.GetOperationRequest], - Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], - ]: - raise NotImplementedError() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None,]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ("CloudBuildTransport",) diff --git a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/grpc.py b/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/grpc.py deleted file mode 100644 index 8f64066b..00000000 --- a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/grpc.py +++ /dev/null @@ -1,357 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not 
use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO - - -class CloudBuildGrpcTransport(CloudBuildTransport): - """gRPC backend transport for CloudBuild. - - Creates and manages builds on Google Cloud Platform. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - - _stubs: Dict[str, Callable] - - def __init__( - self, - *, - host: str = "cloudbuild.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel( - cls, - host: str = "cloudbuild.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service.""" - return self._grpc_channel - - def close(self): - self.grpc_channel.close() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse, - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ("CloudBuildGrpcTransport",) diff --git a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/grpc_asyncio.py b/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/grpc_asyncio.py deleted file mode 100644 index 7ebe2942..00000000 --- a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/grpc_asyncio.py +++ /dev/null @@ -1,356 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.location import locations_pb2 # type: ignore -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO -from .grpc import CloudBuildGrpcTransport - - -class CloudBuildGrpcAsyncIOTransport(CloudBuildTransport): - """gRPC AsyncIO backend transport for CloudBuild. - - Creates and manages builds on Google Cloud Platform. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel( - cls, - host: str = "cloudbuild.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - def __init__( - self, - *, - host: str = "cloudbuild.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. 
- - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - def close(self): - return self.grpc_channel.close() - - @property - def cancel_operation( - self, - ) -> Callable[[operations_pb2.CancelOperationRequest], None]: - r"""Return a callable for the cancel_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "cancel_operation" not in self._stubs: - self._stubs["cancel_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/CancelOperation", - request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, - response_deserializer=None, - ) - return self._stubs["cancel_operation"] - - @property - def get_operation( - self, - ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: - r"""Return a callable for the get_operation method over gRPC.""" - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_operation" not in self._stubs: - self._stubs["get_operation"] = self.grpc_channel.unary_unary( - "/google.longrunning.Operations/GetOperation", - request_serializer=operations_pb2.GetOperationRequest.SerializeToString, - response_deserializer=operations_pb2.Operation.FromString, - ) - return self._stubs["get_operation"] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - iam_policy_pb2.TestIamPermissionsResponse, - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - -__all__ = ("CloudBuildGrpcAsyncIOTransport",) diff --git a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/rest.py b/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/rest.py deleted file mode 100644 index c3dc3468..00000000 --- a/google/cloud/devtools/cloudbuild_v2/services/cloud_build/transports/rest.py +++ /dev/null @@ -1,639 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.api_core import exceptions as core_exceptions -from google.api_core import retry as retries -from google.api_core import rest_helpers -from google.api_core import rest_streaming -from google.api_core import path_template -from google.api_core import gapic_v1 - -from google.protobuf import json_format -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.cloud.location import locations_pb2 # type: ignore -from google.longrunning import operations_pb2 -from requests import __version__ as requests_version -import dataclasses -import re -from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union -import warnings - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - - -from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, - grpc_version=None, - rest_version=requests_version, -) - - -class CloudBuildRestInterceptor: - """Interceptor for CloudBuild. - - Interceptors are used to manipulate requests, request metadata, and responses - in arbitrary ways. - Example use cases include: - * Logging - * Verifying requests according to service or custom semantics - * Stripping extraneous information from responses - - These use cases and more can be enabled by injecting an - instance of a custom subclass when constructing the CloudBuildRestTransport. - - .. 
code-block:: python - class MyCustomCloudBuildInterceptor(CloudBuildRestInterceptor): - transport = CloudBuildRestTransport(interceptor=MyCustomCloudBuildInterceptor()) - client = CloudBuildClient(transport=transport) - - - """ - - def pre_get_iam_policy( - self, - request: iam_policy_pb2.GetIamPolicyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> policy_pb2.Policy: - """Pre-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_get_iam_policy( - self, response: iam_policy_pb2.GetIamPolicyRequest - ) -> policy_pb2.Policy: - """Post-rpc interceptor for get_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - - def pre_set_iam_policy( - self, - request: iam_policy_pb2.SetIamPolicyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> policy_pb2.Policy: - """Pre-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_set_iam_policy( - self, response: iam_policy_pb2.SetIamPolicyRequest - ) -> policy_pb2.Policy: - """Post-rpc interceptor for set_iam_policy - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - - def pre_test_iam_permissions( - self, - request: iam_policy_pb2.TestIamPermissionsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Pre-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. 
- """ - return request, metadata - - def post_test_iam_permissions( - self, response: iam_policy_pb2.TestIamPermissionsRequest - ) -> iam_policy_pb2.TestIamPermissionsResponse: - """Post-rpc interceptor for test_iam_permissions - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - - def pre_cancel_operation( - self, - request: operations_pb2.CancelOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> None: - """Pre-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_cancel_operation( - self, response: operations_pb2.CancelOperationRequest - ) -> None: - """Post-rpc interceptor for cancel_operation - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - - def pre_get_operation( - self, - request: operations_pb2.GetOperationRequest, - metadata: Sequence[Tuple[str, str]], - ) -> operations_pb2.Operation: - """Pre-rpc interceptor for get_operation - - Override in a subclass to manipulate the request or metadata - before they are sent to the CloudBuild server. - """ - return request, metadata - - def post_get_operation( - self, response: operations_pb2.GetOperationRequest - ) -> operations_pb2.Operation: - """Post-rpc interceptor for get_operation - - Override in a subclass to manipulate the response - after it is returned by the CloudBuild server but before - it is returned to user code. - """ - return response - - -@dataclasses.dataclass -class CloudBuildRestStub: - _session: AuthorizedSession - _host: str - _interceptor: CloudBuildRestInterceptor - - -class CloudBuildRestTransport(CloudBuildTransport): - """REST backend transport for CloudBuild. 
- - Creates and manages builds on Google Cloud Platform. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends JSON representations of protocol buffers over HTTP/1.1 - - """ - - def __init__( - self, - *, - host: str = "cloudbuild.googleapis.com", - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - url_scheme: str = "https", - interceptor: Optional[CloudBuildRestInterceptor] = None, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client - certificate to configure mutual TLS HTTP channel. It is ignored - if ``channel`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. 
If ``None``, then default info will be used. - Generally, you only need to set this if you are developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - url_scheme: the protocol scheme for the API endpoint. Normally - "https", but for testing or local servers, - "http" can be specified. - """ - # Run the base constructor - # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. - # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the - # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - - super().__init__( - host=host, - credentials=credentials, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - self._session = AuthorizedSession( - self._credentials, default_host=self.DEFAULT_HOST - ) - if client_cert_source_for_mtls: - self._session.configure_mtls_channel(client_cert_source_for_mtls) - self._interceptor = interceptor or CloudBuildRestInterceptor() - self._prep_wrapped_messages(client_info) - - @property - def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _GetIamPolicy(CloudBuildRestStub): - def __call__( - self, - request: iam_policy_pb2.GetIamPolicyRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - - r"""Call the get iam policy method over HTTP. - - Args: - request (iam_policy_pb2.GetIamPolicyRequest): - The request object for GetIamPolicy method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from GetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v2/{resource=projects/*/locations/*/connections/*}:getIamPolicy", - }, - ] - - request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_iam_policy(resp) - return resp - - @property - def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - - class _SetIamPolicy(CloudBuildRestStub): - def __call__( - self, - request: iam_policy_pb2.SetIamPolicyRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - - r"""Call the set iam policy method over HTTP. 
- - Args: - request (iam_policy_pb2.SetIamPolicyRequest): - The request object for SetIamPolicy method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - policy_pb2.Policy: Response from SetIamPolicy method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v2/{resource=projects/*/locations/*/connections/*}:setIamPolicy", - "body": "*", - }, - ] - - request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - body = json.loads(json.dumps(transcoded_request["body"])) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_set_iam_policy(resp) - return resp - - @property - def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - - class _TestIamPermissions(CloudBuildRestStub): - def __call__( - self, - request: iam_policy_pb2.TestIamPermissionsRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - - r"""Call the test iam permissions method over HTTP. - - Args: - request (iam_policy_pb2.TestIamPermissionsRequest): - The request object for TestIamPermissions method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
- """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v2/{resource=projects/*/locations/*/connections/*}:testIamPermissions", - "body": "*", - }, - ] - - request, metadata = self._interceptor.pre_test_iam_permissions( - request, metadata - ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - body = json.loads(json.dumps(transcoded_request["body"])) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_test_iam_permissions(resp) - return resp - - @property - def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore - - class _CancelOperation(CloudBuildRestStub): - def __call__( - self, - request: operations_pb2.CancelOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - - r"""Call the cancel operation method over HTTP. - - Args: - request (operations_pb2.CancelOperationRequest): - The request object for CancelOperation method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v2/{name=projects/*/locations/*/operations/*}:cancel", - "body": "*", - }, - ] - - request, metadata = self._interceptor.pre_cancel_operation( - request, metadata - ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - body = json.loads(json.dumps(transcoded_request["body"])) - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. - if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - return self._interceptor.post_cancel_operation(None) - - @property - def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore - - class _GetOperation(CloudBuildRestStub): - def __call__( - self, - request: operations_pb2.GetOperationRequest, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> operations_pb2.Operation: - - r"""Call the get operation method over HTTP. 
- - Args: - request (operations_pb2.GetOperationRequest): - The request object for GetOperation method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - operations_pb2.Operation: Response from GetOperation method. - """ - - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v2/{name=projects/*/locations/*/operations/*}", - }, - ] - - request, metadata = self._interceptor.pre_get_operation(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] - - # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) - - # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - ) - - # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception - # subclass. 
- if response.status_code >= 400: - raise core_exceptions.from_http_response(response) - - resp = operations_pb2.Operation() - resp = json_format.Parse(response.content.decode("utf-8"), resp) - resp = self._interceptor.post_get_operation(resp) - return resp - - @property - def kind(self) -> str: - return "rest" - - def close(self): - self._session.close() - - -__all__ = ("CloudBuildRestTransport",) diff --git a/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json b/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json index a65fef1d..b11a9f42 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.12.0" + "version": "3.13.0" }, "snippets": [ { diff --git a/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json b/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json index d9fb46bb..687d8e30 100644 --- a/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json +++ b/samples/generated_samples/snippet_metadata_google.devtools.cloudbuild.v2.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-build", - "version": "3.12.0" + "version": "3.13.0" }, "snippets": [ { diff --git a/samples/snippets/quickstart.py b/samples/snippets/quickstart.py index ceb5224b..c52bac03 100644 --- a/samples/snippets/quickstart.py +++ b/samples/snippets/quickstart.py @@ -18,13 +18,19 @@ from google.cloud.devtools import cloudbuild_v1 -def quickstart(): - """Create and execute a simple Google Cloud Build configuration, - print the in-progress status and print the completed status.""" +def quickstart(transport: str = None): + """ + Create and execute a simple Google Cloud Build configuration, + print the in-progress status and print the completed status. 
+ + Args: + transport(str): The transport to use. For example, "grpc" + or "rest". If set to None, a transport is chosen automatically. + """ # Authorize the client with Google defaults credentials, project_id = google.auth.default() - client = cloudbuild_v1.services.cloud_build.CloudBuildClient() + client = cloudbuild_v1.services.cloud_build.CloudBuildClient(transport=transport) # If you're using Private Pools or a non-global default pool, add a regional # `api_endpoint` to `CloudBuildClient()` diff --git a/samples/snippets/quickstart_test.py b/samples/snippets/quickstart_test.py index 2d23530f..e3d81361 100644 --- a/samples/snippets/quickstart_test.py +++ b/samples/snippets/quickstart_test.py @@ -13,11 +13,14 @@ # limitations under the License. +import pytest + import quickstart -def test_quickstart(capsys): - quickstart.quickstart() +@pytest.mark.parametrize("transport", ["grpc", "rest"]) +def test_quickstart(capsys, transport: str): + quickstart.quickstart(transport=transport) out, _ = capsys.readouterr() # Prints in-progress message assert "hello world" in out diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 41c97c4c..52702c1d 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-build==3.11.1 +google-cloud-build==3.12.0 google-auth==2.16.0 \ No newline at end of file diff --git a/setup.py b/setup.py index 43b21ec3..6c70b2b3 100644 --- a/setup.py +++ b/setup.py @@ -58,9 +58,7 @@ if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") +namespaces = ["google", "google.cloud", "google.cloud.devtools"] setuptools.setup( name=name, diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index 439faa9e..3999f393 100644 --- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ 
-24,10 +24,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api import httpbody_pb2 # type: ignore from google.api_core import client_options @@ -105,6 +112,7 @@ def test__get_default_mtls_endpoint(): [ (CloudBuildClient, "grpc"), (CloudBuildAsyncClient, "grpc_asyncio"), + (CloudBuildClient, "rest"), ], ) def test_cloud_build_client_from_service_account_info(client_class, transport_name): @@ -118,7 +126,11 @@ def test_cloud_build_client_from_service_account_info(client_class, transport_na assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("cloudbuild.googleapis.com:443") + assert client.transport._host == ( + "cloudbuild.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudbuild.googleapis.com" + ) @pytest.mark.parametrize( @@ -126,6 +138,7 @@ def test_cloud_build_client_from_service_account_info(client_class, transport_na [ (transports.CloudBuildGrpcTransport, "grpc"), (transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.CloudBuildRestTransport, "rest"), ], ) def test_cloud_build_client_service_account_always_use_jwt( @@ -151,6 +164,7 @@ def test_cloud_build_client_service_account_always_use_jwt( [ (CloudBuildClient, "grpc"), (CloudBuildAsyncClient, "grpc_asyncio"), + (CloudBuildClient, "rest"), ], ) def test_cloud_build_client_from_service_account_file(client_class, transport_name): @@ -171,13 +185,18 @@ def test_cloud_build_client_from_service_account_file(client_class, transport_na assert client.transport._credentials == creds assert 
isinstance(client, client_class) - assert client.transport._host == ("cloudbuild.googleapis.com:443") + assert client.transport._host == ( + "cloudbuild.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudbuild.googleapis.com" + ) def test_cloud_build_client_get_transport_class(): transport = CloudBuildClient.get_transport_class() available_transports = [ transports.CloudBuildGrpcTransport, + transports.CloudBuildRestTransport, ] assert transport in available_transports @@ -194,6 +213,7 @@ def test_cloud_build_client_get_transport_class(): transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", ), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), ], ) @mock.patch.object( @@ -337,6 +357,8 @@ def test_cloud_build_client_client_options( "grpc_asyncio", "false", ), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "true"), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -530,6 +552,7 @@ def test_cloud_build_client_get_mtls_endpoint_and_cert_source(client_class): transports.CloudBuildGrpcAsyncIOTransport, "grpc_asyncio", ), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), ], ) def test_cloud_build_client_client_options_scopes( @@ -565,6 +588,7 @@ def test_cloud_build_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (CloudBuildClient, transports.CloudBuildRestTransport, "rest", None), ], ) def test_cloud_build_client_client_options_credentials_file( @@ -5823,189 +5847,6573 @@ async def test_list_worker_pools_async_pages(): assert page_.raw_page.next_page_token == token -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.CloudBuildGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.CreateBuildRequest, + dict, + ], +) +def test_create_build_rest(request_type): + client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request_init["build"] = { + "name": "name_value", + "id": "id_value", + "project_id": "project_id_value", + "status": 10, + "status_detail": "status_detail_value", + "source": { + "storage_source": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "repo_source": { + "project_id": "project_id_value", + "repo_name": "repo_name_value", + "branch_name": "branch_name_value", + "tag_name": "tag_name_value", + "commit_sha": "commit_sha_value", + "dir_": "dir__value", + "invert_regex": True, + "substitutions": {}, + }, + "storage_source_manifest": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + }, + "steps": [ + { + "name": "name_value", + "env": ["env_value1", "env_value2"], + "args": ["args_value1", "args_value2"], + "dir_": "dir__value", + "id": "id_value", + "wait_for": ["wait_for_value1", "wait_for_value2"], + "entrypoint": "entrypoint_value", + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": [{"name": "name_value", "path": "path_value"}], + "timing": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "pull_timing": {}, + "timeout": {"seconds": 751, "nanos": 543}, + "status": 10, + "allow_failure": True, + "exit_code": 948, + "allow_exit_codes": [1702, 1703], + "script": "script_value", + } + ], + "results": { + "images": [ + {"name": "name_value", "digest": "digest_value", "push_timing": {}} + ], + 
"build_step_images": [ + "build_step_images_value1", + "build_step_images_value2", + ], + "artifact_manifest": "artifact_manifest_value", + "num_artifacts": 1392, + "build_step_outputs": [ + b"build_step_outputs_blob1", + b"build_step_outputs_blob2", + ], + "artifact_timing": {}, + "python_packages": [ + { + "uri": "uri_value", + "file_hashes": { + "file_hash": [{"type_": 1, "value": b"value_blob"}] + }, + "push_timing": {}, + } + ], + "maven_artifacts": [ + {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + ], + }, + "create_time": {}, + "start_time": {}, + "finish_time": {}, + "timeout": {}, + "images": ["images_value1", "images_value2"], + "queue_ttl": {}, + "artifacts": { + "images": ["images_value1", "images_value2"], + "objects": { + "location": "location_value", + "paths": ["paths_value1", "paths_value2"], + "timing": {}, + }, + "maven_artifacts": [ + { + "repository": "repository_value", + "path": "path_value", + "artifact_id": "artifact_id_value", + "group_id": "group_id_value", + "version": "version_value", + } + ], + "python_packages": [ + { + "repository": "repository_value", + "paths": ["paths_value1", "paths_value2"], + } + ], + }, + "logs_bucket": "logs_bucket_value", + "source_provenance": { + "resolved_storage_source": {}, + "resolved_repo_source": {}, + "resolved_storage_source_manifest": {}, + "file_hashes": {}, + }, + "build_trigger_id": "build_trigger_id_value", + "options": { + "source_provenance_hash": [1], + "requested_verify_option": 1, + "machine_type": 1, + "disk_size_gb": 1261, + "substitution_option": 1, + "dynamic_substitutions": True, + "log_streaming_option": 1, + "worker_pool": "worker_pool_value", + "pool": {"name": "name_value"}, + "logging": 1, + "env": ["env_value1", "env_value2"], + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": {}, + }, + "log_url": "log_url_value", + "substitutions": {}, + "tags": ["tags_value1", "tags_value2"], + "secrets": [{"kms_key_name": "kms_key_name_value", 
"secret_env": {}}], + "timing": {}, + "approval": { + "state": 1, + "config": {"approval_required": True}, + "result": { + "approver_account": "approver_account_value", + "approval_time": {}, + "decision": 1, + "comment": "comment_value", + "url": "url_value", + }, + }, + "service_account": "service_account_value", + "available_secrets": { + "secret_manager": [ + {"version_name": "version_name_value", "env": "env_value"} + ], + "inline": [{"kms_key_name": "kms_key_name_value", "env_map": {}}], + }, + "warnings": [{"text": "text_value", "priority": 1}], + "failure_info": {"type_": 1, "detail": "detail_value"}, + } + request = request_type(**request_init) - # It is an error to provide a credentials file and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_build(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_create_build_rest_required_fields(request_type=cloudbuild.CreateBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudBuildGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_build._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options=options, - transport=transport, + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_build(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_build._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("parent",)) + & set( + ( + "projectId", + "build", + ) ) + ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - # It is an error to provide scopes and a transport instance. 
- transport = transports.CloudBuildGrpcTransport( +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), ) - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudBuildRestInterceptor, "post_create_build" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_create_build" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.CreateBuildRequest.pb(cloudbuild.CreateBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloudbuild.CreateBuildRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_build( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudBuildGrpcTransport( + +def test_create_build_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.CreateBuildRequest +): + client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = CloudBuildClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request_init["build"] = { + "name": "name_value", + "id": "id_value", + "project_id": "project_id_value", + "status": 10, + "status_detail": "status_detail_value", + "source": { + "storage_source": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "repo_source": { + "project_id": "project_id_value", + "repo_name": "repo_name_value", + "branch_name": "branch_name_value", + "tag_name": "tag_name_value", + "commit_sha": "commit_sha_value", + "dir_": "dir__value", + "invert_regex": True, + "substitutions": {}, + }, + "storage_source_manifest": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + }, + "steps": [ + { + "name": "name_value", + "env": ["env_value1", "env_value2"], + "args": ["args_value1", "args_value2"], + "dir_": "dir__value", + "id": "id_value", + "wait_for": ["wait_for_value1", "wait_for_value2"], + "entrypoint": "entrypoint_value", + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": [{"name": "name_value", "path": "path_value"}], + "timing": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "pull_timing": {}, + "timeout": {"seconds": 751, "nanos": 543}, + "status": 10, + "allow_failure": True, + "exit_code": 948, + "allow_exit_codes": [1702, 1703], + "script": "script_value", + } + ], + "results": { + "images": [ + {"name": "name_value", "digest": "digest_value", "push_timing": {}} + ], + "build_step_images": [ + "build_step_images_value1", + "build_step_images_value2", + ], + 
"artifact_manifest": "artifact_manifest_value", + "num_artifacts": 1392, + "build_step_outputs": [ + b"build_step_outputs_blob1", + b"build_step_outputs_blob2", + ], + "artifact_timing": {}, + "python_packages": [ + { + "uri": "uri_value", + "file_hashes": { + "file_hash": [{"type_": 1, "value": b"value_blob"}] + }, + "push_timing": {}, + } + ], + "maven_artifacts": [ + {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + ], + }, + "create_time": {}, + "start_time": {}, + "finish_time": {}, + "timeout": {}, + "images": ["images_value1", "images_value2"], + "queue_ttl": {}, + "artifacts": { + "images": ["images_value1", "images_value2"], + "objects": { + "location": "location_value", + "paths": ["paths_value1", "paths_value2"], + "timing": {}, + }, + "maven_artifacts": [ + { + "repository": "repository_value", + "path": "path_value", + "artifact_id": "artifact_id_value", + "group_id": "group_id_value", + "version": "version_value", + } + ], + "python_packages": [ + { + "repository": "repository_value", + "paths": ["paths_value1", "paths_value2"], + } + ], + }, + "logs_bucket": "logs_bucket_value", + "source_provenance": { + "resolved_storage_source": {}, + "resolved_repo_source": {}, + "resolved_storage_source_manifest": {}, + "file_hashes": {}, + }, + "build_trigger_id": "build_trigger_id_value", + "options": { + "source_provenance_hash": [1], + "requested_verify_option": 1, + "machine_type": 1, + "disk_size_gb": 1261, + "substitution_option": 1, + "dynamic_substitutions": True, + "log_streaming_option": 1, + "worker_pool": "worker_pool_value", + "pool": {"name": "name_value"}, + "logging": 1, + "env": ["env_value1", "env_value2"], + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": {}, + }, + "log_url": "log_url_value", + "substitutions": {}, + "tags": ["tags_value1", "tags_value2"], + "secrets": [{"kms_key_name": "kms_key_name_value", "secret_env": {}}], + "timing": {}, + "approval": { + "state": 1, + "config": {"approval_required": 
True}, + "result": { + "approver_account": "approver_account_value", + "approval_time": {}, + "decision": 1, + "comment": "comment_value", + "url": "url_value", + }, + }, + "service_account": "service_account_value", + "available_secrets": { + "secret_manager": [ + {"version_name": "version_name_value", "env": "env_value"} + ], + "inline": [{"kms_key_name": "kms_key_name_value", "env_map": {}}], + }, + "warnings": [{"text": "text_value", "priority": 1}], + "failure_info": {"type_": 1, "detail": "detail_value"}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_build(request) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudBuildGrpcTransport( + +def test_create_build_rest_flattened(): + client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.CloudBuildGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + build=cloudbuild.Build(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/builds" % client.transport._host, args[1] + ) + + +def test_create_build_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_build( + cloudbuild.CreateBuildRequest(), + project_id="project_id_value", + build=cloudbuild.Build(name="name_value"), + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_create_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + cloudbuild.GetBuildRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = CloudBuildClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_get_build_rest(request_type): client = CloudBuildClient( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudBuildGrpcTransport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "id": "sample2"} + request = request_type(**request_init) -def test_cloud_build_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudBuildTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build( + name="name_value", + id="id_value", + project_id="project_id_value", + status=cloudbuild.Build.Status.PENDING, + status_detail="status_detail_value", + images=["images_value"], + logs_bucket="logs_bucket_value", + build_trigger_id="build_trigger_id_value", + log_url="log_url_value", + tags=["tags_value"], + service_account="service_account_value", ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_cloud_build_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.CloudBuildTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_build(request) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_build", - "get_build", - "list_builds", - "cancel_build", - "retry_build", - "approve_build", - "create_build_trigger", - "get_build_trigger", - "list_build_triggers", - "delete_build_trigger", - "update_build_trigger", - "run_build_trigger", - "receive_trigger_webhook", - "create_worker_pool", - "get_worker_pool", - "delete_worker_pool", - "update_worker_pool", - "list_worker_pools", + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == "status_detail_value" + assert response.images == ["images_value"] + assert response.logs_bucket == "logs_bucket_value" + assert response.build_trigger_id == "build_trigger_id_value" + assert response.log_url == "log_url_value" + assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + + +def test_get_build_rest_required_fields(request_type=cloudbuild.GetBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # verify fields with default values are dropped - # Additionally, the LRO client (a property) should - # also raise NotImplementedError - with pytest.raises(NotImplementedError): - transport.operations_client + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # verify required fields with default values are now present + jsonified_request["projectId"] = "project_id_value" + jsonified_request["id"] = "id_value" -def 
test_cloud_build_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_build._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "id" in jsonified_request + assert jsonified_request["id"] == "id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.Build() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_build(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_build._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("name",)) + & set( + ( + "projectId", + "id", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_get_build" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_get_build" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetBuildRequest.pb(cloudbuild.GetBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) + + request = cloudbuild.GetBuildRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.Build() + + client.get_build( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_build_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.GetBuildRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_build(request) + + +def test_get_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "id": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + id="id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/builds/{id}" % client.transport._host, args[1] + ) + + +def test_get_build_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_build( + cloudbuild.GetBuildRequest(), + project_id="project_id_value", + id="id_value", + ) + + +def test_get_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.ListBuildsRequest, + dict, + ], +) +def test_list_builds_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_builds(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_builds_rest_required_fields(request_type=cloudbuild.ListBuildsRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_builds._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_builds._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "page_size", + "page_token", + "parent", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_builds(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_builds_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_builds._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "pageSize", + "pageToken", + "parent", + ) + ) + & set(("projectId",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_builds_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_list_builds" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_list_builds" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListBuildsRequest.pb(cloudbuild.ListBuildsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } 
+ + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListBuildsResponse.to_json( + cloudbuild.ListBuildsResponse() + ) + + request = cloudbuild.ListBuildsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListBuildsResponse() + + client.list_builds( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_builds_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.ListBuildsRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_builds(request) + + +def test_list_builds_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListBuildsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + filter="filter_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_builds(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/builds" % client.transport._host, args[1] + ) + + +def test_list_builds_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_builds( + cloudbuild.ListBuildsRequest(), + project_id="project_id_value", + filter="filter_value", + ) + + +def test_list_builds_rest_pager(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + cloudbuild.Build(), + ], + next_page_token="abc", + ), + cloudbuild.ListBuildsResponse( + builds=[], + next_page_token="def", + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + ], + next_page_token="ghi", + ), + cloudbuild.ListBuildsResponse( + builds=[ + cloudbuild.Build(), + cloudbuild.Build(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(cloudbuild.ListBuildsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1"} + + pager = client.list_builds(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.Build) for i in results) + + pages = list(client.list_builds(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.CancelBuildRequest, + dict, + ], +) +def test_cancel_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build( + name="name_value", + id="id_value", + project_id="project_id_value", + status=cloudbuild.Build.Status.PENDING, + status_detail="status_detail_value", + images=["images_value"], + logs_bucket="logs_bucket_value", + build_trigger_id="build_trigger_id_value", + log_url="log_url_value", + tags=["tags_value"], + service_account="service_account_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.cancel_build(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" + assert response.project_id == "project_id_value" + assert response.status == cloudbuild.Build.Status.PENDING + assert response.status_detail == "status_detail_value" + assert response.images == ["images_value"] + assert response.logs_bucket == "logs_bucket_value" + assert response.build_trigger_id == "build_trigger_id_value" + assert response.log_url == "log_url_value" + assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + + +def test_cancel_build_rest_required_fields(request_type=cloudbuild.CancelBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + 
credentials=ga_credentials.AnonymousCredentials() + ).cancel_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["id"] = "id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "id" in jsonified_request + assert jsonified_request["id"] == "id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.Build() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.cancel_build(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel_build._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "id", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_cancel_build" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_cancel_build" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.CancelBuildRequest.pb(cloudbuild.CancelBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.Build.to_json(cloudbuild.Build()) + + request = cloudbuild.CancelBuildRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.Build() + + client.cancel_build( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_cancel_build_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.CancelBuildRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.cancel_build(request) + + +def test_cancel_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.Build() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "id": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + id="id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.Build.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.cancel_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/builds/{id}:cancel" % client.transport._host, + args[1], + ) + + +def test_cancel_build_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.cancel_build( + cloudbuild.CancelBuildRequest(), + project_id="project_id_value", + id="id_value", + ) + + +def test_cancel_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.RetryBuildRequest, + dict, + ], +) +def test_retry_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.retry_build(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_retry_build_rest_required_fields(request_type=cloudbuild.RetryBuildRequest): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retry_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["id"] = "id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).retry_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "id" in jsonified_request + assert jsonified_request["id"] == "id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.retry_build(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_retry_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.retry_build._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "id", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_retry_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudBuildRestInterceptor, "post_retry_build" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_retry_build" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
cloudbuild.RetryBuildRequest.pb(cloudbuild.RetryBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloudbuild.RetryBuildRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.retry_build( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_retry_build_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.RetryBuildRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.retry_build(request) + + +def test_retry_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "id": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + id="id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.retry_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/builds/{id}:retry" % client.transport._host, + args[1], + ) + + +def test_retry_build_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.retry_build( + cloudbuild.RetryBuildRequest(), + project_id="project_id_value", + id="id_value", + ) + + +def test_retry_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.ApproveBuildRequest, + dict, + ], +) +def test_approve_build_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/builds/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.approve_build(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_approve_build_rest_required_fields( + request_type=cloudbuild.ApproveBuildRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).approve_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).approve_build._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.approve_build(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_approve_build_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.approve_build._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_approve_build_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) 
as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudBuildRestInterceptor, "post_approve_build" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_approve_build" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ApproveBuildRequest.pb(cloudbuild.ApproveBuildRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloudbuild.ApproveBuildRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.approve_build( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_approve_build_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.ApproveBuildRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/builds/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.approve_build(request) + + +def test_approve_build_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/builds/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + approval_result=cloudbuild.ApprovalResult( + approver_account="approver_account_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.approve_build(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/builds/*}:approve" % client.transport._host, args[1] + ) + + +def test_approve_build_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.approve_build( + cloudbuild.ApproveBuildRequest(), + name="name_value", + approval_result=cloudbuild.ApprovalResult( + approver_account="approver_account_value" + ), + ) + + +def test_approve_build_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.CreateBuildTriggerRequest, + dict, + ], +) +def test_create_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request_init["trigger"] = { + "resource_name": "resource_name_value", + "id": "id_value", + "description": "description_value", + "name": "name_value", + "tags": ["tags_value1", "tags_value2"], + "trigger_template": { + "project_id": "project_id_value", + "repo_name": "repo_name_value", + "branch_name": "branch_name_value", + "tag_name": "tag_name_value", + "commit_sha": "commit_sha_value", + "dir_": "dir__value", + "invert_regex": True, + "substitutions": {}, + }, + "github": { + "installation_id": 1598, + "owner": "owner_value", + "name": "name_value", + "pull_request": { + "branch": "branch_value", + "comment_control": 1, + "invert_regex": True, + }, + "push": { + "branch": "branch_value", + "tag": "tag_value", + "invert_regex": True, + }, + }, + "pubsub_config": { + "subscription": "subscription_value", + "topic": "topic_value", + "service_account_email": "service_account_email_value", + "state": 1, + }, + "webhook_config": {"secret": "secret_value", "state": 1}, + "autodetect": True, + "build": { + "name": "name_value", + "id": "id_value", + "project_id": "project_id_value", + "status": 10, + "status_detail": "status_detail_value", + "source": { + "storage_source": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + 
"repo_source": {}, + "storage_source_manifest": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + }, + "steps": [ + { + "name": "name_value", + "env": ["env_value1", "env_value2"], + "args": ["args_value1", "args_value2"], + "dir_": "dir__value", + "id": "id_value", + "wait_for": ["wait_for_value1", "wait_for_value2"], + "entrypoint": "entrypoint_value", + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": [{"name": "name_value", "path": "path_value"}], + "timing": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "pull_timing": {}, + "timeout": {"seconds": 751, "nanos": 543}, + "status": 10, + "allow_failure": True, + "exit_code": 948, + "allow_exit_codes": [1702, 1703], + "script": "script_value", + } + ], + "results": { + "images": [ + {"name": "name_value", "digest": "digest_value", "push_timing": {}} + ], + "build_step_images": [ + "build_step_images_value1", + "build_step_images_value2", + ], + "artifact_manifest": "artifact_manifest_value", + "num_artifacts": 1392, + "build_step_outputs": [ + b"build_step_outputs_blob1", + b"build_step_outputs_blob2", + ], + "artifact_timing": {}, + "python_packages": [ + { + "uri": "uri_value", + "file_hashes": { + "file_hash": [{"type_": 1, "value": b"value_blob"}] + }, + "push_timing": {}, + } + ], + "maven_artifacts": [ + {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + ], + }, + "create_time": {}, + "start_time": {}, + "finish_time": {}, + "timeout": {}, + "images": ["images_value1", "images_value2"], + "queue_ttl": {}, + "artifacts": { + "images": ["images_value1", "images_value2"], + "objects": { + "location": "location_value", + "paths": ["paths_value1", "paths_value2"], + "timing": {}, + }, + "maven_artifacts": [ + { + "repository": "repository_value", + "path": "path_value", + "artifact_id": "artifact_id_value", + "group_id": "group_id_value", + "version": "version_value", + } + ], + "python_packages": [ + { + 
"repository": "repository_value", + "paths": ["paths_value1", "paths_value2"], + } + ], + }, + "logs_bucket": "logs_bucket_value", + "source_provenance": { + "resolved_storage_source": {}, + "resolved_repo_source": {}, + "resolved_storage_source_manifest": {}, + "file_hashes": {}, + }, + "build_trigger_id": "build_trigger_id_value", + "options": { + "source_provenance_hash": [1], + "requested_verify_option": 1, + "machine_type": 1, + "disk_size_gb": 1261, + "substitution_option": 1, + "dynamic_substitutions": True, + "log_streaming_option": 1, + "worker_pool": "worker_pool_value", + "pool": {"name": "name_value"}, + "logging": 1, + "env": ["env_value1", "env_value2"], + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": {}, + }, + "log_url": "log_url_value", + "substitutions": {}, + "tags": ["tags_value1", "tags_value2"], + "secrets": [{"kms_key_name": "kms_key_name_value", "secret_env": {}}], + "timing": {}, + "approval": { + "state": 1, + "config": {"approval_required": True}, + "result": { + "approver_account": "approver_account_value", + "approval_time": {}, + "decision": 1, + "comment": "comment_value", + "url": "url_value", + }, + }, + "service_account": "service_account_value", + "available_secrets": { + "secret_manager": [ + {"version_name": "version_name_value", "env": "env_value"} + ], + "inline": [{"kms_key_name": "kms_key_name_value", "env_map": {}}], + }, + "warnings": [{"text": "text_value", "priority": 1}], + "failure_info": {"type_": 1, "detail": "detail_value"}, + }, + "filename": "filename_value", + "create_time": {}, + "disabled": True, + "substitutions": {}, + "ignored_files": ["ignored_files_value1", "ignored_files_value2"], + "included_files": ["included_files_value1", "included_files_value2"], + "filter": "filter_value", + "service_account": "service_account_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger( + resource_name="resource_name_value", + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], + disabled=True, + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + filter="filter_value", + service_account="service_account_value", + autodetect=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_build_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == "resource_name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.name == "name_value" + assert response.tags == ["tags_value"] + assert response.disabled is True + assert response.ignored_files == ["ignored_files_value"] + assert response.included_files == ["included_files_value"] + assert response.filter == "filter_value" + assert response.service_account == "service_account_value" + + +def test_create_build_trigger_rest_required_fields( + request_type=cloudbuild.CreateBuildTriggerRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default 
values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("parent",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_build_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("parent",)) + & set( + ( + "projectId", + "trigger", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_create_build_trigger" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_create_build_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.CreateBuildTriggerRequest.pb( + cloudbuild.CreateBuildTriggerRequest() + ) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.BuildTrigger.to_json( + cloudbuild.BuildTrigger() + ) + + request = cloudbuild.CreateBuildTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.BuildTrigger() + + client.create_build_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_build_trigger_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.CreateBuildTriggerRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request_init["trigger"] = { + "resource_name": "resource_name_value", + "id": "id_value", + "description": "description_value", + "name": "name_value", + "tags": ["tags_value1", "tags_value2"], + "trigger_template": { + "project_id": "project_id_value", + "repo_name": "repo_name_value", + "branch_name": "branch_name_value", + "tag_name": "tag_name_value", + "commit_sha": "commit_sha_value", + "dir_": "dir__value", + "invert_regex": True, + "substitutions": {}, + }, + "github": { + "installation_id": 1598, + "owner": "owner_value", + "name": "name_value", + "pull_request": { + "branch": "branch_value", + "comment_control": 1, + "invert_regex": True, + }, + "push": { + "branch": "branch_value", + "tag": "tag_value", + "invert_regex": True, + }, + }, + "pubsub_config": { + "subscription": "subscription_value", + "topic": "topic_value", + "service_account_email": "service_account_email_value", + "state": 1, + }, + "webhook_config": {"secret": "secret_value", "state": 1}, + 
"autodetect": True, + "build": { + "name": "name_value", + "id": "id_value", + "project_id": "project_id_value", + "status": 10, + "status_detail": "status_detail_value", + "source": { + "storage_source": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "repo_source": {}, + "storage_source_manifest": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + }, + "steps": [ + { + "name": "name_value", + "env": ["env_value1", "env_value2"], + "args": ["args_value1", "args_value2"], + "dir_": "dir__value", + "id": "id_value", + "wait_for": ["wait_for_value1", "wait_for_value2"], + "entrypoint": "entrypoint_value", + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": [{"name": "name_value", "path": "path_value"}], + "timing": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "pull_timing": {}, + "timeout": {"seconds": 751, "nanos": 543}, + "status": 10, + "allow_failure": True, + "exit_code": 948, + "allow_exit_codes": [1702, 1703], + "script": "script_value", + } + ], + "results": { + "images": [ + {"name": "name_value", "digest": "digest_value", "push_timing": {}} + ], + "build_step_images": [ + "build_step_images_value1", + "build_step_images_value2", + ], + "artifact_manifest": "artifact_manifest_value", + "num_artifacts": 1392, + "build_step_outputs": [ + b"build_step_outputs_blob1", + b"build_step_outputs_blob2", + ], + "artifact_timing": {}, + "python_packages": [ + { + "uri": "uri_value", + "file_hashes": { + "file_hash": [{"type_": 1, "value": b"value_blob"}] + }, + "push_timing": {}, + } + ], + "maven_artifacts": [ + {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + ], + }, + "create_time": {}, + "start_time": {}, + "finish_time": {}, + "timeout": {}, + "images": ["images_value1", "images_value2"], + "queue_ttl": {}, + "artifacts": { + "images": ["images_value1", "images_value2"], + "objects": { + "location": "location_value", + 
"paths": ["paths_value1", "paths_value2"], + "timing": {}, + }, + "maven_artifacts": [ + { + "repository": "repository_value", + "path": "path_value", + "artifact_id": "artifact_id_value", + "group_id": "group_id_value", + "version": "version_value", + } + ], + "python_packages": [ + { + "repository": "repository_value", + "paths": ["paths_value1", "paths_value2"], + } + ], + }, + "logs_bucket": "logs_bucket_value", + "source_provenance": { + "resolved_storage_source": {}, + "resolved_repo_source": {}, + "resolved_storage_source_manifest": {}, + "file_hashes": {}, + }, + "build_trigger_id": "build_trigger_id_value", + "options": { + "source_provenance_hash": [1], + "requested_verify_option": 1, + "machine_type": 1, + "disk_size_gb": 1261, + "substitution_option": 1, + "dynamic_substitutions": True, + "log_streaming_option": 1, + "worker_pool": "worker_pool_value", + "pool": {"name": "name_value"}, + "logging": 1, + "env": ["env_value1", "env_value2"], + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": {}, + }, + "log_url": "log_url_value", + "substitutions": {}, + "tags": ["tags_value1", "tags_value2"], + "secrets": [{"kms_key_name": "kms_key_name_value", "secret_env": {}}], + "timing": {}, + "approval": { + "state": 1, + "config": {"approval_required": True}, + "result": { + "approver_account": "approver_account_value", + "approval_time": {}, + "decision": 1, + "comment": "comment_value", + "url": "url_value", + }, + }, + "service_account": "service_account_value", + "available_secrets": { + "secret_manager": [ + {"version_name": "version_name_value", "env": "env_value"} + ], + "inline": [{"kms_key_name": "kms_key_name_value", "env_map": {}}], + }, + "warnings": [{"text": "text_value", "priority": 1}], + "failure_info": {"type_": 1, "detail": "detail_value"}, + }, + "filename": "filename_value", + "create_time": {}, + "disabled": True, + "substitutions": {}, + "ignored_files": ["ignored_files_value1", "ignored_files_value2"], + 
"included_files": ["included_files_value1", "included_files_value2"], + "filter": "filter_value", + "service_account": "service_account_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_build_trigger(request) + + +def test_create_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1] + ) + + +def test_create_build_trigger_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_build_trigger( + cloudbuild.CreateBuildTriggerRequest(), + project_id="project_id_value", + trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + ) + + +def test_create_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.GetBuildTriggerRequest, + dict, + ], +) +def test_get_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.BuildTrigger( + resource_name="resource_name_value", + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], + disabled=True, + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + filter="filter_value", + service_account="service_account_value", + autodetect=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_build_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == "resource_name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.name == "name_value" + assert response.tags == ["tags_value"] + assert response.disabled is True + assert response.ignored_files == ["ignored_files_value"] + assert response.included_files == ["included_files_value"] + assert response.filter == "filter_value" + assert response.service_account == "service_account_value" + + +def test_get_build_trigger_rest_required_fields( + request_type=cloudbuild.GetBuildTriggerRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["triggerId"] = "trigger_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == "trigger_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_build_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("name",)) + & set( + ( + "projectId", + "triggerId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_get_build_trigger" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_get_build_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetBuildTriggerRequest.pb( + cloudbuild.GetBuildTriggerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.BuildTrigger.to_json( + cloudbuild.BuildTrigger() + ) + + request = cloudbuild.GetBuildTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.BuildTrigger() + + client.get_build_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_build_trigger_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.GetBuildTriggerRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_build_trigger(request) + + +def test_get_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.BuildTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "trigger_id": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/triggers/{trigger_id}" + % client.transport._host, + args[1], + ) + + +def test_get_build_trigger_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_build_trigger( + cloudbuild.GetBuildTriggerRequest(), + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + +def test_get_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.ListBuildTriggersRequest, + dict, + ], +) +def test_list_build_triggers_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_build_triggers(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListBuildTriggersPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_build_triggers_rest_required_fields( + request_type=cloudbuild.ListBuildTriggersRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_build_triggers._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_build_triggers._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "parent", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_build_triggers(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_build_triggers_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_build_triggers._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "parent", + ) + ) + & set(("projectId",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_build_triggers_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_list_build_triggers" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_list_build_triggers" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListBuildTriggersRequest.pb( + cloudbuild.ListBuildTriggersRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListBuildTriggersResponse.to_json( + cloudbuild.ListBuildTriggersResponse() + ) + + request = cloudbuild.ListBuildTriggersRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListBuildTriggersResponse() + + client.list_build_triggers( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_build_triggers_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.ListBuildTriggersRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_build_triggers(request) + + +def test_list_build_triggers_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListBuildTriggersResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListBuildTriggersResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_build_triggers(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/triggers" % client.transport._host, args[1] + ) + + +def test_list_build_triggers_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_build_triggers( + cloudbuild.ListBuildTriggersRequest(), + project_id="project_id_value", + ) + + +def test_list_build_triggers_rest_pager(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + next_page_token="abc", + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[], + next_page_token="def", + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + ], + next_page_token="ghi", + ), + cloudbuild.ListBuildTriggersResponse( + triggers=[ + cloudbuild.BuildTrigger(), + cloudbuild.BuildTrigger(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloudbuild.ListBuildTriggersResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project_id": "sample1"} + + pager = client.list_build_triggers(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.BuildTrigger) for i in results) + + pages = list(client.list_build_triggers(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token 
== token + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.DeleteBuildTriggerRequest, + dict, + ], +) +def test_delete_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_build_trigger(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_build_trigger_rest_required_fields( + request_type=cloudbuild.DeleteBuildTriggerRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["triggerId"] = "trigger_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == "trigger_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_build_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("name",)) + & set( + ( + "projectId", + "triggerId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_delete_build_trigger" + ) as pre: + 
pre.assert_not_called() + pb_message = cloudbuild.DeleteBuildTriggerRequest.pb( + cloudbuild.DeleteBuildTriggerRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = cloudbuild.DeleteBuildTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_build_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_build_trigger_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.DeleteBuildTriggerRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger_id": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_build_trigger(request) + + +def test_delete_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "trigger_id": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/triggers/{trigger_id}" + % client.transport._host, + args[1], + ) + + +def test_delete_build_trigger_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_build_trigger( + cloudbuild.DeleteBuildTriggerRequest(), + project_id="project_id_value", + trigger_id="trigger_id_value", + ) + + +def test_delete_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.UpdateBuildTriggerRequest, + dict, + ], +) +def test_update_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger_id": "sample2"} + request_init["trigger"] = { + "resource_name": "resource_name_value", + "id": "id_value", + "description": "description_value", + "name": "name_value", + "tags": ["tags_value1", "tags_value2"], + "trigger_template": { + "project_id": "project_id_value", + "repo_name": "repo_name_value", + "branch_name": "branch_name_value", + "tag_name": "tag_name_value", + "commit_sha": "commit_sha_value", + "dir_": "dir__value", + "invert_regex": True, + "substitutions": {}, + }, + "github": { + "installation_id": 1598, + "owner": "owner_value", + "name": "name_value", + "pull_request": { + "branch": "branch_value", + "comment_control": 1, + "invert_regex": True, + }, + "push": { + "branch": "branch_value", + "tag": "tag_value", + "invert_regex": True, + }, + }, + "pubsub_config": { + "subscription": "subscription_value", + "topic": "topic_value", + "service_account_email": "service_account_email_value", + "state": 1, + }, + "webhook_config": {"secret": "secret_value", "state": 1}, + "autodetect": True, + "build": { + "name": "name_value", + "id": "id_value", + "project_id": "project_id_value", + "status": 10, + "status_detail": "status_detail_value", + "source": { + "storage_source": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + 
"repo_source": {}, + "storage_source_manifest": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + }, + "steps": [ + { + "name": "name_value", + "env": ["env_value1", "env_value2"], + "args": ["args_value1", "args_value2"], + "dir_": "dir__value", + "id": "id_value", + "wait_for": ["wait_for_value1", "wait_for_value2"], + "entrypoint": "entrypoint_value", + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": [{"name": "name_value", "path": "path_value"}], + "timing": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "pull_timing": {}, + "timeout": {"seconds": 751, "nanos": 543}, + "status": 10, + "allow_failure": True, + "exit_code": 948, + "allow_exit_codes": [1702, 1703], + "script": "script_value", + } + ], + "results": { + "images": [ + {"name": "name_value", "digest": "digest_value", "push_timing": {}} + ], + "build_step_images": [ + "build_step_images_value1", + "build_step_images_value2", + ], + "artifact_manifest": "artifact_manifest_value", + "num_artifacts": 1392, + "build_step_outputs": [ + b"build_step_outputs_blob1", + b"build_step_outputs_blob2", + ], + "artifact_timing": {}, + "python_packages": [ + { + "uri": "uri_value", + "file_hashes": { + "file_hash": [{"type_": 1, "value": b"value_blob"}] + }, + "push_timing": {}, + } + ], + "maven_artifacts": [ + {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + ], + }, + "create_time": {}, + "start_time": {}, + "finish_time": {}, + "timeout": {}, + "images": ["images_value1", "images_value2"], + "queue_ttl": {}, + "artifacts": { + "images": ["images_value1", "images_value2"], + "objects": { + "location": "location_value", + "paths": ["paths_value1", "paths_value2"], + "timing": {}, + }, + "maven_artifacts": [ + { + "repository": "repository_value", + "path": "path_value", + "artifact_id": "artifact_id_value", + "group_id": "group_id_value", + "version": "version_value", + } + ], + "python_packages": [ + { + 
"repository": "repository_value", + "paths": ["paths_value1", "paths_value2"], + } + ], + }, + "logs_bucket": "logs_bucket_value", + "source_provenance": { + "resolved_storage_source": {}, + "resolved_repo_source": {}, + "resolved_storage_source_manifest": {}, + "file_hashes": {}, + }, + "build_trigger_id": "build_trigger_id_value", + "options": { + "source_provenance_hash": [1], + "requested_verify_option": 1, + "machine_type": 1, + "disk_size_gb": 1261, + "substitution_option": 1, + "dynamic_substitutions": True, + "log_streaming_option": 1, + "worker_pool": "worker_pool_value", + "pool": {"name": "name_value"}, + "logging": 1, + "env": ["env_value1", "env_value2"], + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": {}, + }, + "log_url": "log_url_value", + "substitutions": {}, + "tags": ["tags_value1", "tags_value2"], + "secrets": [{"kms_key_name": "kms_key_name_value", "secret_env": {}}], + "timing": {}, + "approval": { + "state": 1, + "config": {"approval_required": True}, + "result": { + "approver_account": "approver_account_value", + "approval_time": {}, + "decision": 1, + "comment": "comment_value", + "url": "url_value", + }, + }, + "service_account": "service_account_value", + "available_secrets": { + "secret_manager": [ + {"version_name": "version_name_value", "env": "env_value"} + ], + "inline": [{"kms_key_name": "kms_key_name_value", "env_map": {}}], + }, + "warnings": [{"text": "text_value", "priority": 1}], + "failure_info": {"type_": 1, "detail": "detail_value"}, + }, + "filename": "filename_value", + "create_time": {}, + "disabled": True, + "substitutions": {}, + "ignored_files": ["ignored_files_value1", "ignored_files_value2"], + "included_files": ["included_files_value1", "included_files_value2"], + "filter": "filter_value", + "service_account": "service_account_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger( + resource_name="resource_name_value", + id="id_value", + description="description_value", + name="name_value", + tags=["tags_value"], + disabled=True, + ignored_files=["ignored_files_value"], + included_files=["included_files_value"], + filter="filter_value", + service_account="service_account_value", + autodetect=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_build_trigger(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) + assert response.resource_name == "resource_name_value" + assert response.id == "id_value" + assert response.description == "description_value" + assert response.name == "name_value" + assert response.tags == ["tags_value"] + assert response.disabled is True + assert response.ignored_files == ["ignored_files_value"] + assert response.included_files == ["included_files_value"] + assert response.filter == "filter_value" + assert response.service_account == "service_account_value" + + +def test_update_build_trigger_rest_required_fields( + request_type=cloudbuild.UpdateBuildTriggerRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + 
# verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["triggerId"] = "trigger_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == "trigger_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_build_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "projectId", + "triggerId", + "trigger", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_update_build_trigger" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_update_build_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.UpdateBuildTriggerRequest.pb( + cloudbuild.UpdateBuildTriggerRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.BuildTrigger.to_json( + cloudbuild.BuildTrigger() + ) + + request = cloudbuild.UpdateBuildTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.BuildTrigger() + + client.update_build_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_build_trigger_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.UpdateBuildTriggerRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger_id": "sample2"} + request_init["trigger"] = { + "resource_name": "resource_name_value", + "id": "id_value", + "description": "description_value", + "name": "name_value", + "tags": ["tags_value1", "tags_value2"], + "trigger_template": { + "project_id": "project_id_value", + "repo_name": "repo_name_value", + "branch_name": "branch_name_value", + "tag_name": "tag_name_value", + "commit_sha": "commit_sha_value", + "dir_": "dir__value", + "invert_regex": True, + "substitutions": {}, + }, + "github": { + "installation_id": 1598, + "owner": "owner_value", + "name": "name_value", + "pull_request": { + "branch": "branch_value", + "comment_control": 1, + "invert_regex": True, + }, + "push": { + "branch": "branch_value", + "tag": "tag_value", + "invert_regex": True, + }, + }, + "pubsub_config": { + "subscription": "subscription_value", + "topic": "topic_value", + "service_account_email": "service_account_email_value", + "state": 1, + }, + "webhook_config": {"secret": 
"secret_value", "state": 1}, + "autodetect": True, + "build": { + "name": "name_value", + "id": "id_value", + "project_id": "project_id_value", + "status": 10, + "status_detail": "status_detail_value", + "source": { + "storage_source": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + "repo_source": {}, + "storage_source_manifest": { + "bucket": "bucket_value", + "object_": "object__value", + "generation": 1068, + }, + }, + "steps": [ + { + "name": "name_value", + "env": ["env_value1", "env_value2"], + "args": ["args_value1", "args_value2"], + "dir_": "dir__value", + "id": "id_value", + "wait_for": ["wait_for_value1", "wait_for_value2"], + "entrypoint": "entrypoint_value", + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": [{"name": "name_value", "path": "path_value"}], + "timing": { + "start_time": {"seconds": 751, "nanos": 543}, + "end_time": {}, + }, + "pull_timing": {}, + "timeout": {"seconds": 751, "nanos": 543}, + "status": 10, + "allow_failure": True, + "exit_code": 948, + "allow_exit_codes": [1702, 1703], + "script": "script_value", + } + ], + "results": { + "images": [ + {"name": "name_value", "digest": "digest_value", "push_timing": {}} + ], + "build_step_images": [ + "build_step_images_value1", + "build_step_images_value2", + ], + "artifact_manifest": "artifact_manifest_value", + "num_artifacts": 1392, + "build_step_outputs": [ + b"build_step_outputs_blob1", + b"build_step_outputs_blob2", + ], + "artifact_timing": {}, + "python_packages": [ + { + "uri": "uri_value", + "file_hashes": { + "file_hash": [{"type_": 1, "value": b"value_blob"}] + }, + "push_timing": {}, + } + ], + "maven_artifacts": [ + {"uri": "uri_value", "file_hashes": {}, "push_timing": {}} + ], + }, + "create_time": {}, + "start_time": {}, + "finish_time": {}, + "timeout": {}, + "images": ["images_value1", "images_value2"], + "queue_ttl": {}, + "artifacts": { + "images": ["images_value1", "images_value2"], + "objects": { + 
"location": "location_value", + "paths": ["paths_value1", "paths_value2"], + "timing": {}, + }, + "maven_artifacts": [ + { + "repository": "repository_value", + "path": "path_value", + "artifact_id": "artifact_id_value", + "group_id": "group_id_value", + "version": "version_value", + } + ], + "python_packages": [ + { + "repository": "repository_value", + "paths": ["paths_value1", "paths_value2"], + } + ], + }, + "logs_bucket": "logs_bucket_value", + "source_provenance": { + "resolved_storage_source": {}, + "resolved_repo_source": {}, + "resolved_storage_source_manifest": {}, + "file_hashes": {}, + }, + "build_trigger_id": "build_trigger_id_value", + "options": { + "source_provenance_hash": [1], + "requested_verify_option": 1, + "machine_type": 1, + "disk_size_gb": 1261, + "substitution_option": 1, + "dynamic_substitutions": True, + "log_streaming_option": 1, + "worker_pool": "worker_pool_value", + "pool": {"name": "name_value"}, + "logging": 1, + "env": ["env_value1", "env_value2"], + "secret_env": ["secret_env_value1", "secret_env_value2"], + "volumes": {}, + }, + "log_url": "log_url_value", + "substitutions": {}, + "tags": ["tags_value1", "tags_value2"], + "secrets": [{"kms_key_name": "kms_key_name_value", "secret_env": {}}], + "timing": {}, + "approval": { + "state": 1, + "config": {"approval_required": True}, + "result": { + "approver_account": "approver_account_value", + "approval_time": {}, + "decision": 1, + "comment": "comment_value", + "url": "url_value", + }, + }, + "service_account": "service_account_value", + "available_secrets": { + "secret_manager": [ + {"version_name": "version_name_value", "env": "env_value"} + ], + "inline": [{"kms_key_name": "kms_key_name_value", "env_map": {}}], + }, + "warnings": [{"text": "text_value", "priority": 1}], + "failure_info": {"type_": 1, "detail": "detail_value"}, + }, + "filename": "filename_value", + "create_time": {}, + "disabled": True, + "substitutions": {}, + "ignored_files": ["ignored_files_value1", 
"ignored_files_value2"], + "included_files": ["included_files_value1", "included_files_value2"], + "filter": "filter_value", + "service_account": "service_account_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_build_trigger(request) + + +def test_update_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.BuildTrigger() + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "trigger_id": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.BuildTrigger.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/triggers/{trigger_id}" + % client.transport._host, + args[1], + ) + + +def test_update_build_trigger_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_build_trigger( + cloudbuild.UpdateBuildTriggerRequest(), + project_id="project_id_value", + trigger_id="trigger_id_value", + trigger=cloudbuild.BuildTrigger(resource_name="resource_name_value"), + ) + + +def test_update_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.RunBuildTriggerRequest, + dict, + ], +) +def test_run_build_trigger_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger_id": "sample2"} + request_init["source"] = { + "project_id": "project_id_value", + "repo_name": "repo_name_value", + "branch_name": "branch_name_value", + "tag_name": "tag_name_value", + "commit_sha": "commit_sha_value", + "dir_": "dir__value", + "invert_regex": True, + "substitutions": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.run_build_trigger(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_run_build_trigger_rest_required_fields( + request_type=cloudbuild.RunBuildTriggerRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["project_id"] = "" + request_init["trigger_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_build_trigger._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["projectId"] = "project_id_value" + jsonified_request["triggerId"] = "trigger_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_build_trigger._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("name",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "projectId" in jsonified_request + assert jsonified_request["projectId"] == "project_id_value" + assert "triggerId" in jsonified_request + assert jsonified_request["triggerId"] == "trigger_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.run_build_trigger(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_build_trigger_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_build_trigger._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("name",)) + & set( + ( + "projectId", + "triggerId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_run_build_trigger_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudBuildRestInterceptor, "post_run_build_trigger" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_run_build_trigger" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.RunBuildTriggerRequest.pb( + cloudbuild.RunBuildTriggerRequest() + ) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloudbuild.RunBuildTriggerRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.run_build_trigger( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_run_build_trigger_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.RunBuildTriggerRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger_id": "sample2"} + request_init["source"] = { + "project_id": "project_id_value", + "repo_name": "repo_name_value", + "branch_name": "branch_name_value", + "tag_name": "tag_name_value", + "commit_sha": "commit_sha_value", + "dir_": "dir__value", + "invert_regex": True, + "substitutions": {}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.run_build_trigger(request) + + +def test_run_build_trigger_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"project_id": "sample1", "trigger_id": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project_id="project_id_value", + trigger_id="trigger_id_value", + source=cloudbuild.RepoSource(project_id="project_id_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.run_build_trigger(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/projects/{project_id}/triggers/{trigger_id}:run" + % client.transport._host, + args[1], + ) + + +def test_run_build_trigger_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.run_build_trigger( + cloudbuild.RunBuildTriggerRequest(), + project_id="project_id_value", + trigger_id="trigger_id_value", + source=cloudbuild.RepoSource(project_id="project_id_value"), + ) + + +def test_run_build_trigger_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.ReceiveTriggerWebhookRequest, + dict, + ], +) +def test_receive_trigger_webhook_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger": "sample2"} + request_init["body"] = { + "content_type": "content_type_value", + "data": b"data_blob", + "extensions": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ReceiveTriggerWebhookResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ReceiveTriggerWebhookResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.receive_trigger_webhook(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.ReceiveTriggerWebhookResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_receive_trigger_webhook_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_receive_trigger_webhook" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_receive_trigger_webhook" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ReceiveTriggerWebhookRequest.pb( + cloudbuild.ReceiveTriggerWebhookRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ReceiveTriggerWebhookResponse.to_json( + cloudbuild.ReceiveTriggerWebhookResponse() + ) + + request = cloudbuild.ReceiveTriggerWebhookRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ReceiveTriggerWebhookResponse() + + client.receive_trigger_webhook( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_receive_trigger_webhook_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.ReceiveTriggerWebhookRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a 
request that will satisfy transcoding + request_init = {"project_id": "sample1", "trigger": "sample2"} + request_init["body"] = { + "content_type": "content_type_value", + "data": b"data_blob", + "extensions": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.receive_trigger_webhook(request) + + +def test_receive_trigger_webhook_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.CreateWorkerPoolRequest, + dict, + ], +) +def test_create_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["worker_pool"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "annotations": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "state": 1, + "private_pool_v1_config": { + "worker_config": { + "machine_type": "machine_type_value", + "disk_size_gb": 1261, + }, + "network_config": { + "peered_network": "peered_network_value", + "egress_option": 1, + }, + }, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_worker_pool(request) + + # Establish that the response is the type that we expect. + assert response.operation.name == "operations/spam" + + +def test_create_worker_pool_rest_required_fields( + request_type=cloudbuild.CreateWorkerPoolRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["worker_pool_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + assert "workerPoolId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "workerPoolId" in jsonified_request + assert jsonified_request["workerPoolId"] == request_init["worker_pool_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["workerPoolId"] = "worker_pool_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "validate_only", + "worker_pool_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "workerPoolId" in jsonified_request + assert jsonified_request["workerPoolId"] == "worker_pool_id_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_worker_pool(request) + + expected_params = [ + ( + "workerPoolId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "validateOnly", + "workerPoolId", + ) + ) + & set( + ( + "parent", + "workerPool", + "workerPoolId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudBuildRestInterceptor, "post_create_worker_pool" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_create_worker_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
cloudbuild.CreateWorkerPoolRequest.pb( + cloudbuild.CreateWorkerPoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloudbuild.CreateWorkerPoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.create_worker_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_worker_pool_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.CreateWorkerPoolRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["worker_pool"] = { + "name": "name_value", + "display_name": "display_name_value", + "uid": "uid_value", + "annotations": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "state": 1, + "private_pool_v1_config": { + "worker_config": { + "machine_type": "machine_type_value", + "disk_size_gb": 1261, + }, + "network_config": { + "peered_network": "peered_network_value", + "egress_option": 1, + }, + }, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_worker_pool(request) + + +def test_create_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + worker_pool=cloudbuild.WorkerPool(name="name_value"), + worker_pool_id="worker_pool_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/workerPools" + % client.transport._host, + args[1], + ) + + +def test_create_worker_pool_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_worker_pool( + cloudbuild.CreateWorkerPoolRequest(), + parent="parent_value", + worker_pool=cloudbuild.WorkerPool(name="name_value"), + worker_pool_id="worker_pool_id_value", + ) + + +def test_create_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.GetWorkerPoolRequest, + dict, + ], +) +def test_get_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/workerPools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.WorkerPool( + name="name_value", + display_name="display_name_value", + uid="uid_value", + state=cloudbuild.WorkerPool.State.CREATING, + etag="etag_value", + private_pool_v1_config=cloudbuild.PrivatePoolV1Config( + worker_config=cloudbuild.PrivatePoolV1Config.WorkerConfig( + machine_type="machine_type_value" + ) + ), + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_worker_pool(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.WorkerPool) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.uid == "uid_value" + assert response.state == cloudbuild.WorkerPool.State.CREATING + assert response.etag == "etag_value" + + +def test_get_worker_pool_rest_required_fields( + request_type=cloudbuild.GetWorkerPoolRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).get_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.WorkerPool() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_worker_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + 
+@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_get_worker_pool" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_get_worker_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.GetWorkerPoolRequest.pb( + cloudbuild.GetWorkerPoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.WorkerPool.to_json( + cloudbuild.WorkerPool() + ) + + request = cloudbuild.GetWorkerPoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.WorkerPool() + + client.get_worker_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_worker_pool_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.GetWorkerPoolRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/workerPools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the 
method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_worker_pool(request) + + +def test_get_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = cloudbuild.WorkerPool() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workerPools/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.WorkerPool.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workerPools/*}" + % client.transport._host, + args[1], + ) + + +def test_get_worker_pool_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_worker_pool( + cloudbuild.GetWorkerPoolRequest(), + name="name_value", + ) + + +def test_get_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.DeleteWorkerPoolRequest, + dict, + ], +) +def test_delete_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/workerPools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_worker_pool(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_delete_worker_pool_rest_required_fields( + request_type=cloudbuild.DeleteWorkerPoolRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "allow_missing", + "etag", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_worker_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "allowMissing", + "etag", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudBuildRestInterceptor, "post_delete_worker_pool" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_delete_worker_pool" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = cloudbuild.DeleteWorkerPoolRequest.pb( + cloudbuild.DeleteWorkerPoolRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloudbuild.DeleteWorkerPoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.delete_worker_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_worker_pool_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.DeleteWorkerPoolRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/workerPools/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_worker_pool(request) + + +def test_delete_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/workerPools/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/workerPools/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_worker_pool_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_worker_pool( + cloudbuild.DeleteWorkerPoolRequest(), + name="name_value", + ) + + +def test_delete_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.UpdateWorkerPoolRequest, + dict, + ], +) +def test_update_worker_pool_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = { + "worker_pool": { + "name": "projects/sample1/locations/sample2/workerPools/sample3" + } + } + request_init["worker_pool"] = { + "name": "projects/sample1/locations/sample2/workerPools/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "annotations": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "state": 1, + "private_pool_v1_config": { + "worker_config": { + "machine_type": "machine_type_value", + "disk_size_gb": 1261, + }, + "network_config": { + "peered_network": "peered_network_value", + "egress_option": 1, + }, + }, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_worker_pool(request) + + # Establish that the response is the type that we expect. 
+ assert response.operation.name == "operations/spam" + + +def test_update_worker_pool_rest_required_fields( + request_type=cloudbuild.UpdateWorkerPoolRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_worker_pool._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_worker_pool._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "update_mask", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_worker_pool(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_worker_pool_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_worker_pool._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "updateMask", + "validateOnly", + ) + ) + & set(("workerPool",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_worker_pool_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + operation.Operation, "_set_result_from_operation" + ), mock.patch.object( + transports.CloudBuildRestInterceptor, "post_update_worker_pool" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_update_worker_pool" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.UpdateWorkerPoolRequest.pb( + cloudbuild.UpdateWorkerPoolRequest() + ) + transcode.return_value = { + 
"method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = json_format.MessageToJson( + operations_pb2.Operation() + ) + + request = cloudbuild.UpdateWorkerPoolRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + + client.update_worker_pool( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_worker_pool_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.UpdateWorkerPoolRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "worker_pool": { + "name": "projects/sample1/locations/sample2/workerPools/sample3" + } + } + request_init["worker_pool"] = { + "name": "projects/sample1/locations/sample2/workerPools/sample3", + "display_name": "display_name_value", + "uid": "uid_value", + "annotations": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "delete_time": {}, + "state": 1, + "private_pool_v1_config": { + "worker_config": { + "machine_type": "machine_type_value", + "disk_size_gb": 1261, + }, + "network_config": { + "peered_network": "peered_network_value", + "egress_option": 1, + }, + }, + "etag": "etag_value", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_worker_pool(request) + + +def test_update_worker_pool_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "worker_pool": { + "name": "projects/sample1/locations/sample2/workerPools/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + worker_pool=cloudbuild.WorkerPool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_worker_pool(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{worker_pool.name=projects/*/locations/*/workerPools/*}" + % client.transport._host, + args[1], + ) + + +def test_update_worker_pool_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_worker_pool( + cloudbuild.UpdateWorkerPoolRequest(), + worker_pool=cloudbuild.WorkerPool(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_worker_pool_rest_error(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + cloudbuild.ListWorkerPoolsRequest, + dict, + ], +) +def test_list_worker_pools_rest(request_type): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListWorkerPoolsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_worker_pools(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListWorkerPoolsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_worker_pools_rest_required_fields( + request_type=cloudbuild.ListWorkerPoolsRequest, +): + transport_class = transports.CloudBuildRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_worker_pools._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_worker_pools._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = cloudbuild.ListWorkerPoolsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_worker_pools(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_worker_pools_rest_unset_required_fields(): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_worker_pools._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_worker_pools_rest_interceptors(null_interceptor): + transport = transports.CloudBuildRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.CloudBuildRestInterceptor(), + ) + client = CloudBuildClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.CloudBuildRestInterceptor, "post_list_worker_pools" + ) as post, mock.patch.object( + transports.CloudBuildRestInterceptor, "pre_list_worker_pools" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = cloudbuild.ListWorkerPoolsRequest.pb( + cloudbuild.ListWorkerPoolsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = cloudbuild.ListWorkerPoolsResponse.to_json( + cloudbuild.ListWorkerPoolsResponse() + ) + + request = cloudbuild.ListWorkerPoolsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = cloudbuild.ListWorkerPoolsResponse() + + client.list_worker_pools( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_worker_pools_rest_bad_request( + transport: str = "rest", request_type=cloudbuild.ListWorkerPoolsRequest +): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_worker_pools(request) + + +def test_list_worker_pools_rest_flattened(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = cloudbuild.ListWorkerPoolsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = cloudbuild.ListWorkerPoolsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_worker_pools(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/workerPools" + % client.transport._host, + args[1], + ) + + +def test_list_worker_pools_rest_flattened_error(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_worker_pools( + cloudbuild.ListWorkerPoolsRequest(), + parent="parent_value", + ) + + +def test_list_worker_pools_rest_pager(transport: str = "rest"): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + next_page_token="abc", + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[], + next_page_token="def", + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + ], + next_page_token="ghi", + ), + cloudbuild.ListWorkerPoolsResponse( + worker_pools=[ + cloudbuild.WorkerPool(), + cloudbuild.WorkerPool(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + cloudbuild.ListWorkerPoolsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_worker_pools(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, cloudbuild.WorkerPool) for i in results) + + pages = list(client.list_worker_pools(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = CloudBuildClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = CloudBuildClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.CloudBuildGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.CloudBuildGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudBuildGrpcTransport, + transports.CloudBuildGrpcAsyncIOTransport, + transports.CloudBuildRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = CloudBuildClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.CloudBuildGrpcTransport, + ) + + +def test_cloud_build_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.CloudBuildTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_cloud_build_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudBuildTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_build", + "get_build", + "list_builds", + "cancel_build", + "retry_build", + "approve_build", + "create_build_trigger", + "get_build_trigger", + "list_build_triggers", + "delete_build_trigger", + "update_build_trigger", + "run_build_trigger", + "receive_trigger_webhook", + "create_worker_pool", + "get_worker_pool", + "delete_worker_pool", + "update_worker_pool", + "list_worker_pools", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_cloud_build_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) @@ -6069,6 +12477,7 @@ def test_cloud_build_transport_auth_adc(transport_class): [ transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport, + transports.CloudBuildRestTransport, ], ) def 
test_cloud_build_transport_auth_gdch_credentials(transport_class): @@ -6163,11 +12572,40 @@ def test_cloud_build_grpc_transport_client_cert_source_for_mtls(transport_class) ) +def test_cloud_build_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.CloudBuildRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +def test_cloud_build_rest_lro_client(): + client = CloudBuildClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cloud_build_host_no_port(transport_name): @@ -6178,7 +12616,11 @@ def test_cloud_build_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("cloudbuild.googleapis.com:443") + assert client.transport._host == ( + "cloudbuild.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudbuild.googleapis.com" + ) @pytest.mark.parametrize( @@ -6186,6 +12628,7 @@ def test_cloud_build_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_cloud_build_host_with_port(transport_name): @@ -6196,7 +12639,84 @@ def test_cloud_build_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("cloudbuild.googleapis.com:8000") + assert client.transport._host == ( + "cloudbuild.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://cloudbuild.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_cloud_build_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = CloudBuildClient( + credentials=creds1, + transport=transport_name, + ) + client2 = CloudBuildClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_build._session + session2 = client2.transport.create_build._session + assert session1 != session2 + session1 = client1.transport.get_build._session + session2 = client2.transport.get_build._session + assert session1 != session2 + session1 = client1.transport.list_builds._session + session2 = client2.transport.list_builds._session + assert session1 != session2 + session1 = client1.transport.cancel_build._session + session2 = 
client2.transport.cancel_build._session + assert session1 != session2 + session1 = client1.transport.retry_build._session + session2 = client2.transport.retry_build._session + assert session1 != session2 + session1 = client1.transport.approve_build._session + session2 = client2.transport.approve_build._session + assert session1 != session2 + session1 = client1.transport.create_build_trigger._session + session2 = client2.transport.create_build_trigger._session + assert session1 != session2 + session1 = client1.transport.get_build_trigger._session + session2 = client2.transport.get_build_trigger._session + assert session1 != session2 + session1 = client1.transport.list_build_triggers._session + session2 = client2.transport.list_build_triggers._session + assert session1 != session2 + session1 = client1.transport.delete_build_trigger._session + session2 = client2.transport.delete_build_trigger._session + assert session1 != session2 + session1 = client1.transport.update_build_trigger._session + session2 = client2.transport.update_build_trigger._session + assert session1 != session2 + session1 = client1.transport.run_build_trigger._session + session2 = client2.transport.run_build_trigger._session + assert session1 != session2 + session1 = client1.transport.receive_trigger_webhook._session + session2 = client2.transport.receive_trigger_webhook._session + assert session1 != session2 + session1 = client1.transport.create_worker_pool._session + session2 = client2.transport.create_worker_pool._session + assert session1 != session2 + session1 = client1.transport.get_worker_pool._session + session2 = client2.transport.get_worker_pool._session + assert session1 != session2 + session1 = client1.transport.delete_worker_pool._session + session2 = client2.transport.delete_worker_pool._session + assert session1 != session2 + session1 = client1.transport.update_worker_pool._session + session2 = client2.transport.update_worker_pool._session + assert session1 != session2 + session1 = 
client1.transport.list_worker_pools._session + session2 = client2.transport.list_worker_pools._session + assert session1 != session2 def test_cloud_build_grpc_transport_channel(): @@ -6714,6 +13234,7 @@ async def test_transport_close_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -6731,6 +13252,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/tests/unit/gapic/cloudbuild_v2/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v2/test_cloud_build.py deleted file mode 100644 index ffefdd49..00000000 --- a/tests/unit/gapic/cloudbuild_v2/test_cloud_build.py +++ /dev/null @@ -1,2499 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os - -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -from collections.abc import Iterable -from google.protobuf import json_format -import json -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers -from requests import Response -from requests import Request, PreparedRequest -from requests.sessions import Session -from google.protobuf import json_format - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.devtools.cloudbuild_v2.services.cloud_build import ( - CloudBuildAsyncClient, -) -from google.cloud.devtools.cloudbuild_v2.services.cloud_build import CloudBuildClient -from google.cloud.devtools.cloudbuild_v2.services.cloud_build import transports -from google.cloud.location import locations_pb2 -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.longrunning import operations_pb2 -from google.oauth2 import service_account -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert CloudBuildClient._get_default_mtls_endpoint(None) is None - assert ( - CloudBuildClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - ) - assert ( - CloudBuildClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - CloudBuildClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - CloudBuildClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert CloudBuildClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (CloudBuildClient, "grpc"), - (CloudBuildAsyncClient, "grpc_asyncio"), - (CloudBuildClient, "rest"), - ], -) -def test_cloud_build_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "cloudbuild.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://cloudbuild.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_class,transport_name", - [ - (transports.CloudBuildGrpcTransport, "grpc"), - (transports.CloudBuildGrpcAsyncIOTransport, 
"grpc_asyncio"), - (transports.CloudBuildRestTransport, "rest"), - ], -) -def test_cloud_build_client_service_account_always_use_jwt( - transport_class, transport_name -): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize( - "client_class,transport_name", - [ - (CloudBuildClient, "grpc"), - (CloudBuildAsyncClient, "grpc_asyncio"), - (CloudBuildClient, "rest"), - ], -) -def test_cloud_build_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json( - "dummy/file/path.json", transport=transport_name - ) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - "cloudbuild.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://cloudbuild.googleapis.com" - ) - - -def test_cloud_build_client_get_transport_class(): - transport = CloudBuildClient.get_transport_class() - available_transports = [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildRestTransport, - ] - assert transport in 
available_transports - - transport = CloudBuildClient.get_transport_class("grpc") - assert transport == transports.CloudBuildGrpcTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), - ], -) -@mock.patch.object( - CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) -) -@mock.patch.object( - CloudBuildAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudBuildAsyncClient), -) -def test_cloud_build_client_client_options( - client_class, transport_class, transport_name -): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(CloudBuildClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(CloudBuildClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions( - api_audience="https://language.googleapis.com" - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com", - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,use_client_cert_env", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "true"), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "false"), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest", "true"), - (CloudBuildClient, transports.CloudBuildRestTransport, 
"rest", "false"), - ], -) -@mock.patch.object( - CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) -) -@mock.patch.object( - CloudBuildAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudBuildAsyncClient), -) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_cloud_build_client_mtls_env_auto( - client_class, transport_class, transport_name, use_client_cert_env -): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=client_cert_source_callback, - ): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [CloudBuildClient, CloudBuildAsyncClient]) -@mock.patch.object( - CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) -) -@mock.patch.object( - CloudBuildAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(CloudBuildAsyncClient), -) -def test_cloud_build_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_client_cert_source, - ): - ( - api_endpoint, - cert_source, - ) = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc"), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - ), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest"), - ], -) -def test_cloud_build_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - (CloudBuildClient, transports.CloudBuildRestTransport, "rest", None), - ], -) -def test_cloud_build_client_client_options_credentials_file( - client_class, transport_class, 
transport_name, grpc_helpers -): - # Check the case credentials file is provided. - options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -def test_cloud_build_client_client_options_from_dict(): - with mock.patch( - "google.cloud.devtools.cloudbuild_v2.services.cloud_build.transports.CloudBuildGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = CloudBuildClient(client_options={"api_endpoint": "squid.clam.whelk"}) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", grpc_helpers), - ( - CloudBuildAsyncClient, - transports.CloudBuildGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_cloud_build_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=(), - scopes=None, - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = CloudBuildClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = CloudBuildClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.CloudBuildGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.CloudBuildGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - transports.CloudBuildRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = CloudBuildClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.CloudBuildGrpcTransport, - ) - - -def test_cloud_build_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.CloudBuildTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_cloud_build_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.devtools.cloudbuild_v2.services.cloud_build.transports.CloudBuildTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.CloudBuildTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "set_iam_policy", - "get_iam_policy", - "test_iam_permissions", - "get_operation", - "cancel_operation", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_cloud_build_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.devtools.cloudbuild_v2.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudBuildTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=(), - quota_project_id="octopus", - ) - - -def test_cloud_build_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.devtools.cloudbuild_v2.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.CloudBuildTransport() - adc.assert_called_once() - - -def test_cloud_build_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - CloudBuildClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=(), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - ], -) -def test_cloud_build_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=(), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.CloudBuildGrpcTransport, - transports.CloudBuildGrpcAsyncIOTransport, - transports.CloudBuildRestTransport, - ], -) -def test_cloud_build_transport_auth_gdch_credentials(transport_class): - host = "https://language.com" - api_audience_tests = [None, "https://language2.com"] - api_audience_expect = [host, "https://language2.com"] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, "default", autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock( - return_value=gdch_mock - ) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with(e) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.CloudBuildGrpcTransport, grpc_helpers), - (transports.CloudBuildGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_cloud_build_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "cloudbuild.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=(), - scopes=["1", "2"], - default_host="cloudbuild.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], -) -def test_cloud_build_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_cloud_build_http_transport_client_cert_source_for_mtls(): - cred = ga_credentials.AnonymousCredentials() - with mock.patch( - "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" - ) as mock_configure_mtls_channel: - transports.CloudBuildRestTransport( - credentials=cred, client_cert_source_for_mtls=client_cert_source_callback - ) - mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_cloud_build_host_no_port(transport_name): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="cloudbuild.googleapis.com" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "cloudbuild.googleapis.com:443" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://cloudbuild.googleapis.com" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "grpc_asyncio", - "rest", - ], -) -def test_cloud_build_host_with_port(transport_name): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="cloudbuild.googleapis.com:8000" - ), - transport=transport_name, - ) - assert client.transport._host == ( - "cloudbuild.googleapis.com:8000" - if transport_name in ["grpc", "grpc_asyncio"] - else "https://cloudbuild.googleapis.com:8000" - ) - - -@pytest.mark.parametrize( - "transport_name", - [ 
- "rest", - ], -) -def test_cloud_build_client_transport_session_collision(transport_name): - creds1 = ga_credentials.AnonymousCredentials() - creds2 = ga_credentials.AnonymousCredentials() - client1 = CloudBuildClient( - credentials=creds1, - transport=transport_name, - ) - client2 = CloudBuildClient( - credentials=creds2, - transport=transport_name, - ) - - -def test_cloud_build_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_cloud_build_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.CloudBuildGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], -) -def test_cloud_build_transport_channel_mtls_with_client_cert_source(transport_class): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize( - "transport_class", - [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], -) -def test_cloud_build_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = CloudBuildClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = CloudBuildClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format( - folder=folder, - ) - actual = CloudBuildClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = CloudBuildClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format( - organization=organization, - ) - actual = CloudBuildClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = CloudBuildClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format( - project=project, - ) - actual = CloudBuildClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = CloudBuildClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = CloudBuildClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - actual = CloudBuildClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = CloudBuildClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = CloudBuildClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.CloudBuildTransport, "_prep_wrapped_messages" - ) as prep: - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.CloudBuildTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = CloudBuildClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest -): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request 
= json_format.ParseDict( - {"resource": "projects/sample1/locations/sample2/connections/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.GetIamPolicyRequest, - dict, - ], -) -def test_get_iam_policy_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = { - "resource": "projects/sample1/locations/sample2/connections/sample3" - } - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest -): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/locations/sample2/connections/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.SetIamPolicyRequest, - dict, - ], -) -def test_set_iam_policy_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = { - "resource": "projects/sample1/locations/sample2/connections/sample3" - } - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest -): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/locations/sample2/connections/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, - ], -) -def test_test_iam_permissions_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = { - "resource": "projects/sample1/locations/sample2/connections/sample3" - } - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - -def test_cancel_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.CancelOperationRequest -): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.cancel_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.CancelOperationRequest, - dict, - ], -) -def test_cancel_operation_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "{}" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.cancel_operation(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_get_operation_rest_bad_request( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"name": "projects/sample1/locations/sample2/operations/sample3"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_operation(request) - - -@pytest.mark.parametrize( - "request_type", - [ - operations_pb2.GetOperationRequest, - dict, - ], -) -def test_get_operation_rest(request_type): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_operation(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, operations_pb2.Operation) - - -def test_cancel_operation(transport: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_cancel_operation_async(transport: str = "grpc"): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.CancelOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_cancel_operation_field_headers(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = None - - client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_cancel_operation_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.CancelOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.cancel_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_cancel_operation_from_dict(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - response = client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_cancel_operation_from_dict_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.cancel_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_get_operation(transport: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - response = client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -@pytest.mark.asyncio -async def test_get_operation_async(transport: str = "grpc"): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = operations_pb2.GetOperationRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, operations_pb2.Operation) - - -def test_get_operation_field_headers(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = operations_pb2.Operation() - - client.get_operation(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_operation_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = operations_pb2.GetOperationRequest() - request.name = "locations" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - await client.get_operation(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "name=locations", - ) in kw["metadata"] - - -def test_get_operation_from_dict(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation() - - response = client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_get_operation_from_dict_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_operation), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation() - ) - response = await client.get_operation( - request={ - "name": "locations", - } - ) - call.assert_called() - - -def test_set_iam_policy(transport: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_set_iam_policy_field_headers(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "resource=resource/value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "resource=resource/value", - ) in kw["metadata"] - - -def test_set_iam_policy_from_dict(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_get_iam_policy(transport: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy( - version=774, - etag=b"etag_blob", - ) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "resource=resource/value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "resource=resource/value", - ) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_test_iam_permissions(transport: str = "grpc"): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "resource=resource/value", - ) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "resource=resource/value", - ) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = CloudBuildAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - "rest", - "grpc", - ] - for transport in transports: - client = CloudBuildClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - # Test client calls underlying transport. - with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - - -@pytest.mark.parametrize( - "client_class,transport_class", - [ - (CloudBuildClient, transports.CloudBuildGrpcTransport), - (CloudBuildAsyncClient, transports.CloudBuildGrpcAsyncIOTransport), - ], -) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - 
client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - )