Skip to content

Commit 56da68f

Browse files
yoshi-automations authored and duskis committed
[CHANGE ME] Re-generated dataproc to pick up changes in the API or client library generator. (googleapis#8152)
1 parent 62f53f3 commit 56da68f

14 files changed

Lines changed: 292 additions & 159 deletions

File tree

dataproc/google/cloud/dataproc_v1/gapic/enums.py

Lines changed: 19 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -19,6 +19,25 @@
1919
import enum
2020

2121

22+
class Component(enum.IntEnum):
23+
"""
24+
Cluster components that can be activated.
25+
26+
Attributes:
27+
COMPONENT_UNSPECIFIED (int): Unspecified component.
28+
ANACONDA (int): The Anaconda python distribution.
29+
HIVE_WEBHCAT (int): The Hive Web HCatalog (the REST service for accessing HCatalog).
30+
JUPYTER (int): The Jupyter Notebook.
31+
ZEPPELIN (int): The Zeppelin notebook.
32+
"""
33+
34+
COMPONENT_UNSPECIFIED = 0
35+
ANACONDA = 5
36+
HIVE_WEBHCAT = 3
37+
JUPYTER = 1
38+
ZEPPELIN = 4
39+
40+
2241
class ClusterOperationStatus(object):
2342
class State(enum.IntEnum):
2443
"""

dataproc/google/cloud/dataproc_v1/proto/clusters.proto

Lines changed: 35 additions & 36 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2018 Google LLC.
1+
// Copyright 2019 Google LLC.
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.
@@ -18,7 +18,7 @@ syntax = "proto3";
1818
package google.cloud.dataproc.v1;
1919

2020
import "google/api/annotations.proto";
21-
import "google/cloud/dataproc/v1/operations.proto";
21+
import "google/cloud/dataproc/v1/shared.proto";
2222
import "google/longrunning/operations.proto";
2323
import "google/protobuf/duration.proto";
2424
import "google/protobuf/field_mask.proto";
@@ -33,26 +33,23 @@ option java_package = "com.google.cloud.dataproc.v1";
3333
// of Compute Engine instances.
3434
service ClusterController {
3535
// Creates a cluster in a project.
36-
rpc CreateCluster(CreateClusterRequest)
37-
returns (google.longrunning.Operation) {
36+
rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) {
3837
option (google.api.http) = {
3938
post: "/v1/projects/{project_id}/regions/{region}/clusters"
4039
body: "cluster"
4140
};
4241
}
4342

4443
// Updates a cluster in a project.
45-
rpc UpdateCluster(UpdateClusterRequest)
46-
returns (google.longrunning.Operation) {
44+
rpc UpdateCluster(UpdateClusterRequest) returns (google.longrunning.Operation) {
4745
option (google.api.http) = {
4846
patch: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}"
4947
body: "cluster"
5048
};
5149
}
5250

5351
// Deletes a cluster in a project.
54-
rpc DeleteCluster(DeleteClusterRequest)
55-
returns (google.longrunning.Operation) {
52+
rpc DeleteCluster(DeleteClusterRequest) returns (google.longrunning.Operation) {
5653
option (google.api.http) = {
5754
delete: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}"
5855
};
@@ -75,8 +72,7 @@ service ClusterController {
7572
// Gets cluster diagnostic information.
7673
// After the operation completes, the Operation.response field
7774
// contains `DiagnoseClusterOutputLocation`.
78-
rpc DiagnoseCluster(DiagnoseClusterRequest)
79-
returns (google.longrunning.Operation) {
75+
rpc DiagnoseCluster(DiagnoseClusterRequest) returns (google.longrunning.Operation) {
8076
option (google.api.http) = {
8177
post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose"
8278
body: "*"
@@ -126,15 +122,18 @@ message Cluster {
126122

127123
// The cluster config.
128124
message ClusterConfig {
129-
// Optional. A Cloud Storage staging bucket used for sharing generated
130-
// SSH keys and config. If you do not specify a staging bucket, Cloud
131-
// Dataproc will determine an appropriate Cloud Storage location (US,
125+
// Optional. A Google Cloud Storage bucket used to stage job
126+
// dependencies, config files, and job driver console output.
127+
// If you do not specify a staging bucket, Cloud
128+
// Dataproc will determine a Cloud Storage location (US,
132129
// ASIA, or EU) for your cluster's staging bucket according to the Google
133-
// Compute Engine zone where your cluster is deployed, and then it will create
134-
// and manage this project-level, per-location bucket for you.
130+
// Compute Engine zone where your cluster is deployed, and then create
131+
// and manage this project-level, per-location bucket (see
132+
// [Cloud Dataproc staging
133+
// bucket](/dataproc/docs/concepts/configuring-clusters/staging-bucket)).
135134
string config_bucket = 1;
136135

137-
// Required. The shared Compute Engine config settings for
136+
// Optional. The shared Compute Engine config settings for
138137
// all instances in a cluster.
139138
GceClusterConfig gce_cluster_config = 8;
140139

@@ -213,8 +212,8 @@ message GceClusterConfig {
213212
//
214213
// A full URL, partial URI, or short name are valid. Examples:
215214
//
216-
// * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/sub0`
217-
// * `projects/[project_id]/regions/us-east1/sub0`
215+
// * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0`
216+
// * `projects/[project_id]/regions/us-east1/subnetworks/sub0`
218217
// * `sub0`
219218
string subnetwork_uri = 6;
220219

@@ -447,13 +446,13 @@ message SoftwareConfig {
447446
// such as "1.2" (including a subminor version, such as "1.2.29"), or the
448447
// ["preview"
449448
// version](/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
450-
// If unspecified, it defaults to the latest version.
449+
// If unspecified, it defaults to the latest Debian version.
451450
string image_version = 1;
452451

453452
// Optional. The properties to set on daemon config files.
454453
//
455-
// Property keys are specified in `prefix:property` format, such as
456-
// `core:fs.defaultFS`. The following are supported prefixes
454+
// Property keys are specified in `prefix:property` format, for example
455+
// `core:hadoop.tmp.dir`. The following are supported prefixes
457456
// and their mappings:
458457
//
459458
// * capacity-scheduler: `capacity-scheduler.xml`
@@ -469,6 +468,9 @@ message SoftwareConfig {
469468
// For more information, see
470469
// [Cluster properties](/dataproc/docs/concepts/cluster-properties).
471470
map<string, string> properties = 2;
471+
472+
// The set of optional components to activate on the cluster.
473+
repeated Component optional_components = 3;
472474
}
473475

474476
// Contains cluster daemon metrics, such as HDFS and YARN stats.
@@ -496,11 +498,10 @@ message CreateClusterRequest {
496498
Cluster cluster = 2;
497499

498500
// Optional. A unique id used to identify the request. If the server
499-
// receives two
500-
// [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest]
501-
// requests with the same id, then the second request will be ignored and the
502-
// first [google.longrunning.Operation][google.longrunning.Operation] created
503-
// and stored in the backend is returned.
501+
// receives two [CreateClusterRequest][google.cloud.dataproc.v1.CreateClusterRequest] requests with the same
502+
// id, then the second request will be ignored and the
503+
// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend
504+
// is returned.
504505
//
505506
// It is recommended to always set this value to a
506507
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
@@ -585,11 +586,10 @@ message UpdateClusterRequest {
585586
google.protobuf.FieldMask update_mask = 4;
586587

587588
// Optional. A unique id used to identify the request. If the server
588-
// receives two
589-
// [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest]
590-
// requests with the same id, then the second request will be ignored and the
591-
// first [google.longrunning.Operation][google.longrunning.Operation] created
592-
// and stored in the backend is returned.
589+
// receives two [UpdateClusterRequest][google.cloud.dataproc.v1.UpdateClusterRequest] requests with the same
590+
// id, then the second request will be ignored and the
591+
// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
592+
// backend is returned.
593593
//
594594
// It is recommended to always set this value to a
595595
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
@@ -616,11 +616,10 @@ message DeleteClusterRequest {
616616
string cluster_uuid = 4;
617617

618618
// Optional. A unique id used to identify the request. If the server
619-
// receives two
620-
// [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest]
621-
// requests with the same id, then the second request will be ignored and the
622-
// first [google.longrunning.Operation][google.longrunning.Operation] created
623-
// and stored in the backend is returned.
619+
// receives two [DeleteClusterRequest][google.cloud.dataproc.v1.DeleteClusterRequest] requests with the same
620+
// id, then the second request will be ignored and the
621+
// first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the
622+
// backend is returned.
624623
//
625624
// It is recommended to always set this value to a
626625
// [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).

dataproc/google/cloud/dataproc_v1/proto/clusters_pb2.py

Lines changed: 98 additions & 72 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

dataproc/google/cloud/dataproc_v1/proto/jobs.proto

Lines changed: 9 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2018 Google LLC.
1+
// Copyright 2019 Google LLC.
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.
@@ -473,11 +473,12 @@ message JobReference {
473473
// belongs to.
474474
string project_id = 1;
475475

476-
// Optional. The job ID, which must be unique within the project. The job ID
477-
// is generated by the server upon job submission or provided by the user as a
478-
// means to perform retries without creating duplicate jobs. The ID must
479-
// contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or
480-
// hyphens (-). The maximum length is 100 characters.
476+
// Optional. The job ID, which must be unique within the project.
477+
//
478+
// The ID must contain only letters (a-z, A-Z), numbers (0-9),
479+
// underscores (_), or hyphens (-). The maximum length is 100 characters.
480+
//
481+
// If not specified by the caller, the job ID will be provided by the server.
481482
string job_id = 2;
482483
}
483484

@@ -634,8 +635,8 @@ message SubmitJobRequest {
634635
Job job = 2;
635636

636637
// Optional. A unique id used to identify the request. If the server
637-
// receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest]
638-
// requests with the same id, then the second request will be ignored and the
638+
// receives two [SubmitJobRequest][google.cloud.dataproc.v1.SubmitJobRequest] requests with the same
639+
// id, then the second request will be ignored and the
639640
// first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend
640641
// is returned.
641642
//

dataproc/google/cloud/dataproc_v1/proto/jobs_pb2.py

Lines changed: 4 additions & 5 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

dataproc/google/cloud/dataproc_v1/proto/operations.proto

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2018 Google LLC.
1+
// Copyright 2019 Google LLC.
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.

dataproc/google/cloud/dataproc_v1/proto/shared.proto

Lines changed: 21 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,4 @@
1-
// Copyright 2018 Google LLC.
1+
// Copyright 2019 Google LLC.
22
//
33
// Licensed under the Apache License, Version 2.0 (the "License");
44
// you may not use this file except in compliance with the License.
@@ -15,12 +15,29 @@
1515

1616
syntax = "proto3";
1717

18-
package google.cloud.dataproc.v1beta2;
18+
package google.cloud.dataproc.v1;
1919

2020
import "google/api/annotations.proto";
2121

22-
option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2;dataproc";
22+
option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc";
2323
option java_multiple_files = true;
2424
option java_outer_classname = "SharedProto";
25-
option java_package = "com.google.cloud.dataproc.v1beta2";
25+
option java_package = "com.google.cloud.dataproc.v1";
2626

27+
// Cluster components that can be activated.
28+
enum Component {
29+
// Unspecified component.
30+
COMPONENT_UNSPECIFIED = 0;
31+
32+
// The Anaconda python distribution.
33+
ANACONDA = 5;
34+
35+
// The Hive Web HCatalog (the REST service for accessing HCatalog).
36+
HIVE_WEBHCAT = 3;
37+
38+
// The Jupyter Notebook.
39+
JUPYTER = 1;
40+
41+
// The Zeppelin notebook.
42+
ZEPPELIN = 4;
43+
}

dataproc/google/cloud/dataproc_v1/proto/shared_pb2.py

Lines changed: 80 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.
Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,2 @@
1+
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
2+
import grpc

0 commit comments

Comments (0)